Reviewers: danno,
Description:
Refactoring: Make predictable code flag handling architecture-independent.
Please review this at http://codereview.chromium.org/11359127/
SVN Base: https://v8.googlecode.com/svn/branches/bleeding_edge
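For context, the class this change moves from the ARM header into AssemblerBase is an RAII scope: its constructor switches the assembler into predictable-code-size mode and its destructor restores the previous setting. A minimal usage sketch, assuming masm points at any AssemblerBase-derived assembler (the surrounding code is illustrative only and not part of this patch):

  {
    // RAII: forces fixed-size instruction selection within this block so the
    // emitted byte count matches between the snapshot and the running VM.
    PredictableCodeSizeScope predictable(masm);
    // ... emit the size-sensitive code sequence here ...
  }  // destructor restores the previous predictable_code_size() value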
Affected files:
M src/arm/assembler-arm.h
M src/arm/assembler-arm.cc
M src/assembler.h
M src/assembler.cc
M src/ia32/assembler-ia32.h
M src/x64/assembler-x64.h
M src/x64/assembler-x64.cc
Index: src/arm/assembler-arm.cc
diff --git a/src/arm/assembler-arm.cc b/src/arm/assembler-arm.cc
index 80abafdefa348addcb241a14c0871dc207d488b5..fc733bcfee64f9177193cbe7a091d70a03eb0bb9 100644
--- a/src/arm/assembler-arm.cc
+++ b/src/arm/assembler-arm.cc
@@ -326,8 +326,7 @@ Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
: AssemblerBase(arg_isolate),
recorded_ast_id_(TypeFeedbackId::None()),
positions_recorder_(this),
- emit_debug_code_(FLAG_debug_code),
- predictable_code_size_(false) {
+ emit_debug_code_(FLAG_debug_code) {
if (buffer == NULL) {
// Do our own buffer management.
if (buffer_size <= kMinimalBufferSize) {
Index: src/arm/assembler-arm.h
diff --git a/src/arm/assembler-arm.h b/src/arm/assembler-arm.h
index 3fab20bcb15734aba17e2406382ba68a603041ab..3a264acc3852c120af36dd2b2dcf85e55c901e45 100644
--- a/src/arm/assembler-arm.h
+++ b/src/arm/assembler-arm.h
@@ -652,11 +652,6 @@ class Assembler : public AssemblerBase {
// Overrides the default provided by FLAG_debug_code.
void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
- // Avoids using instructions that vary in size in unpredictable ways between
- // the snapshot and the running VM. This is needed by the full compiler so
- // that it can recompile code with debug support and fix the PC.
- void set_predictable_code_size(bool value) { predictable_code_size_ = value; }
-
// GetCode emits any pending (non-emitted) code and fills the descriptor
// desc. GetCode() is idempotent; it returns the same result if no other
// Assembler functions are invoked in between GetCode() calls.
@@ -1185,8 +1180,6 @@ class Assembler : public AssemblerBase {
// Jump unconditionally to given label.
void jmp(Label* L) { b(L, al); }
- bool predictable_code_size() const { return predictable_code_size_; }
-
static bool use_immediate_embedded_pointer_loads(
const Assembler* assembler) {
#ifdef USE_BLX
@@ -1499,7 +1492,6 @@ class Assembler : public AssemblerBase {
PositionsRecorder positions_recorder_;
bool emit_debug_code_;
- bool predictable_code_size_;
friend class PositionsRecorder;
friend class EnsureSpace;
@@ -1514,26 +1506,6 @@ class EnsureSpace BASE_EMBEDDED {
};
-class PredictableCodeSizeScope {
- public:
- explicit PredictableCodeSizeScope(Assembler* assembler)
- : asm_(assembler) {
- old_value_ = assembler->predictable_code_size();
- assembler->set_predictable_code_size(true);
- }
-
- ~PredictableCodeSizeScope() {
- if (!old_value_) {
- asm_->set_predictable_code_size(false);
- }
- }
-
- private:
- Assembler* asm_;
- bool old_value_;
-};
-
-
} } // namespace v8::internal
#endif // V8_ARM_ASSEMBLER_ARM_H_
Index: src/assembler.cc
diff --git a/src/assembler.cc b/src/assembler.cc
index 0dbdf0061dc27102aa026b36c1b62ea8e903e137..57032b8a8571741162d54dc54dba7179b3759208 100644
--- a/src/assembler.cc
+++ b/src/assembler.cc
@@ -108,7 +108,8 @@ const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING";
AssemblerBase::AssemblerBase(Isolate* isolate)
: isolate_(isolate),
- jit_cookie_(0) {
+ jit_cookie_(0),
+ predictable_code_size_(false) {
if (FLAG_mask_constants_with_cookie && isolate != NULL) {
jit_cookie_ = V8::RandomPrivate(isolate);
}
Index: src/assembler.h
diff --git a/src/assembler.h b/src/assembler.h
index efa87c10c86a95c23d2ecbe3a520dd66eae9ad8f..9d15b5110c2583ad10c73f252f49a55c338a3584 100644
--- a/src/assembler.h
+++ b/src/assembler.h
@@ -59,7 +59,10 @@ class AssemblerBase: public Malloced {
explicit AssemblerBase(Isolate* isolate);
Isolate* isolate() const { return isolate_; }
- int jit_cookie() { return jit_cookie_; }
+ int jit_cookie() const { return jit_cookie_; }
+
+ bool predictable_code_size() const { return predictable_code_size_; }
+ void set_predictable_code_size(bool value) { predictable_code_size_ = value; }
// Overwrite a host NaN with a quiet target NaN. Used by mksnapshot for
// cross-snapshotting.
@@ -68,6 +71,27 @@ class AssemblerBase: public Malloced {
private:
Isolate* isolate_;
int jit_cookie_;
+ bool predictable_code_size_;
+};
+
+
+// Avoids using instructions that vary in size in unpredictable ways between the
+// snapshot and the running VM.
+class PredictableCodeSizeScope {
+ public:
+ explicit PredictableCodeSizeScope(AssemblerBase* assembler)
+ : assembler_(assembler) {
+ old_value_ = assembler_->predictable_code_size();
+ assembler_->set_predictable_code_size(true);
+ }
+
+ ~PredictableCodeSizeScope() {
+ assembler_->set_predictable_code_size(old_value_);
+ }
+
+ private:
+ AssemblerBase* assembler_;
+ bool old_value_;
};
Index: src/ia32/assembler-ia32.h
diff --git a/src/ia32/assembler-ia32.h b/src/ia32/assembler-ia32.h
index 6e079dee9f004cb7068f7db3a6ab0cff3e191c7e..017abccad9bce467837ca3472bf69c5b79e5c964 100644
--- a/src/ia32/assembler-ia32.h
+++ b/src/ia32/assembler-ia32.h
@@ -587,11 +587,6 @@ class Assembler : public AssemblerBase {
// Overrides the default provided by FLAG_debug_code.
void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
- // Avoids using instructions that vary in size in unpredictable ways between
- // the snapshot and the running VM. This is needed by the full compiler so
- // that it can recompile code with debug support and fix the PC.
- void set_predictable_code_size(bool value) { predictable_code_size_ = value; }
-
// GetCode emits any pending (non-emitted) code and fills the descriptor
// desc. GetCode() is idempotent; it returns the same result if no other
// Assembler functions are invoked in between GetCode() calls.
@@ -1126,7 +1121,6 @@ class Assembler : public AssemblerBase {
protected:
bool emit_debug_code() const { return emit_debug_code_; }
- bool predictable_code_size() const { return predictable_code_size_ ; }
void movsd(XMMRegister dst, const Operand& src);
void movsd(const Operand& dst, XMMRegister src);
@@ -1202,7 +1196,6 @@ class Assembler : public AssemblerBase {
PositionsRecorder positions_recorder_;
bool emit_debug_code_;
- bool predictable_code_size_;
friend class PositionsRecorder;
};
Index: src/x64/assembler-x64.cc
diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc
index f2c05d621202ba2330e9c4a0c12a58296cb037ed..0700312ad1b91239c4ad0e3ce3ed54cc84086e16 100644
--- a/src/x64/assembler-x64.cc
+++ b/src/x64/assembler-x64.cc
@@ -350,8 +350,7 @@ Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
: AssemblerBase(arg_isolate),
code_targets_(100),
positions_recorder_(this),
- emit_debug_code_(FLAG_debug_code),
- predictable_code_size_(false) {
+ emit_debug_code_(FLAG_debug_code) {
if (buffer == NULL) {
// Do our own buffer management.
if (buffer_size <= kMinimalBufferSize) {
@@ -1238,13 +1237,13 @@ void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
// Determine whether we can use 1-byte offsets for backwards branches,
// which have a max range of 128 bytes.
- // We also need to check the predictable_code_size_ flag here, because
- // on x64, when the full code generator recompiles code for debugging, some
- // places need to be padded out to a certain size. The debugger is keeping
- // track of how often it did this so that it can adjust return addresses on
- // the stack, but if the size of jump instructions can also change, that's
- // not enough and the calculated offsets would be incorrect.
- if (is_int8(offs - short_size) && !predictable_code_size_) {
+ // We also need to check predictable_code_size() flag here, because on x64,
+ // when the full code generator recompiles code for debugging, some places
+ // need to be padded out to a certain size. The debugger is keeping track of
+ // how often it did this so that it can adjust return addresses on the
+ // stack, but if the size of jump instructions can also change, that's not
+ // enough and the calculated offsets would be incorrect.
+ if (is_int8(offs - short_size) && !predictable_code_size()) {
// 0111 tttn #8-bit disp.
emit(0x70 | cc);
emit((offs - short_size) & 0xFF);
@@ -1301,7 +1300,7 @@ void Assembler::jmp(Label* L, Label::Distance distance) {
if (L->is_bound()) {
int offs = L->pos() - pc_offset() - 1;
ASSERT(offs <= 0);
- if (is_int8(offs - short_size) && !predictable_code_size_) {
+ if (is_int8(offs - short_size) && !predictable_code_size()) {
// 1110 1011 #8-bit disp.
emit(0xEB);
emit((offs - short_size) & 0xFF);
Index: src/x64/assembler-x64.h
diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h
index cd10d723ffcd4ad549852865f8b8b1bceaa4c00c..f77be7a9d0f7c3c2de60bb028c2170e74eb54a7d 100644
--- a/src/x64/assembler-x64.h
+++ b/src/x64/assembler-x64.h
@@ -561,11 +561,6 @@ class Assembler : public AssemblerBase {
// Overrides the default provided by FLAG_debug_code.
void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
- // Avoids using instructions that vary in size in unpredictable ways between
- // the snapshot and the running VM. This is needed by the full compiler so
- // that it can recompile code with debug support and fix the PC.
- void set_predictable_code_size(bool value) { predictable_code_size_ = value; }
-
// GetCode emits any pending (non-emitted) code and fills the descriptor
// desc. GetCode() is idempotent; it returns the same result if no other
// Assembler functions are invoked in between GetCode() calls.
@@ -1451,7 +1446,6 @@ class Assembler : public AssemblerBase {
protected:
bool emit_debug_code() const { return emit_debug_code_; }
- bool predictable_code_size() const { return predictable_code_size_; }
private:
byte* addr_at(int pos) { return buffer_ + pos; }
@@ -1656,7 +1650,6 @@ class Assembler : public AssemblerBase {
PositionsRecorder positions_recorder_;
bool emit_debug_code_;
- bool predictable_code_size_;
friend class PositionsRecorder;
};