Author: Maciej Fijalkowski <[email protected]>
Branch: optresult
Changeset: r76180:694e29ab1d9a
Date: 2015-02-27 18:32 +0200
http://bitbucket.org/pypy/pypy/changeset/694e29ab1d9a/

Log:    (fijal, arigo) start rewriting the class hierarchy for XxxInfo

diff --git a/rpython/jit/metainterp/optimizeopt/info.py b/rpython/jit/metainterp/optimizeopt/info.py
--- a/rpython/jit/metainterp/optimizeopt/info.py
+++ b/rpython/jit/metainterp/optimizeopt/info.py
@@ -21,7 +21,49 @@
     def force_box(self, op, optforce):
         return op
 
-class PtrOptInfo(AbstractInfo):
+    
+class PtrInfo(AbstractInfo):
+    _attrs_ = ()
+
+    
+class NonNullPtrInfo(PtrInfo):
+    _attrs_ = ()
+
+    
+class InstancePtrInfo(NonNullPtrInfo):
+    _attrs_ = ('_known_class', '_is_virtual', '_fields')
+
+    def __init__(self, known_class=None, is_virtual=False):
+        self._known_class = known_class
+        self._is_virtual = is_virtual
+
+    def force_box(self, op, optforce):
+        if self._is_virtual:
+            op.set_forwarded(None)
+            optforce.emit_operation(op)
+            newop = optforce.getlastop()
+            op.set_forwarded(newop)
+            newop.set_forwarded(self)
+            self._is_virtual = False
+            return newop
+        return op
+
+    def get_known_class(self):
+        return self._known_class
+    
+class StructPtrInfo(NonNullPtrInfo):
+    _attrs_ = ('is_virtual', '_fields')
+
+    
+class ArrayPtrInfo(NonNullPtrInfo):
+    _attrs_ = ('is_virtual', 'length', '_items')
+
+    
+class StrPtrInfo(NonNullPtrInfo):
+    _attrs_ = ()
+
+    
+class XPtrOptInfo(AbstractInfo):
     _attrs_ = ('_tag', 'known_class', 'last_guard_pos', 'lenbound')
     is_info_class = True
 
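
The hunk above replaces the single PtrOptInfo with small, purpose-specific info classes that hang off an operation via set_forwarded(). A minimal, self-contained sketch of that forwarding dance follows; FakeOp and FakeOptForce are made-up stand-ins for the real resoperation and optimizer objects, and only force_box mirrors the diff:

class FakeOp(object):
    def __init__(self, name):
        self.name = name
        self._forwarded = None
    def set_forwarded(self, obj):
        self._forwarded = obj
    def get_forwarded(self):
        return self._forwarded

class FakeOptForce(object):
    def __init__(self):
        self.emitted = []
    def emit_operation(self, op):
        # pretend that emitting produces a fresh residual operation
        self.emitted.append(FakeOp(op.name + "_residual"))
    def getlastop(self):
        return self.emitted[-1]

class InstancePtrInfo(object):
    def __init__(self, known_class=None, is_virtual=False):
        self._known_class = known_class
        self._is_virtual = is_virtual
    def force_box(self, op, optforce):
        if self._is_virtual:
            op.set_forwarded(None)        # detach so the op can be re-emitted
            optforce.emit_operation(op)   # materialize the allocation
            newop = optforce.getlastop()
            op.set_forwarded(newop)       # old box now points at the residual op
            newop.set_forwarded(self)     # residual op keeps the class knowledge
            self._is_virtual = False
            return newop
        return op

op = FakeOp("new_with_vtable")
opinfo = InstancePtrInfo(known_class="ConstClass(Node)", is_virtual=True)
op.set_forwarded(opinfo)
forced = opinfo.force_box(op, FakeOptForce())
assert op.get_forwarded() is forced
assert forced.get_forwarded() is opinfo
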
diff --git a/rpython/jit/metainterp/optimizeopt/optimizer.py b/rpython/jit/metainterp/optimizeopt/optimizer.py
--- a/rpython/jit/metainterp/optimizeopt/optimizer.py
+++ b/rpython/jit/metainterp/optimizeopt/optimizer.py
@@ -6,7 +6,7 @@
      IntUnbounded, ConstIntBound
 from rpython.jit.metainterp.optimizeopt.util import make_dispatcher_method
 from rpython.jit.metainterp.resoperation import rop, AbstractResOp, GuardResOp
-from rpython.jit.metainterp.optimizeopt.info import PtrOptInfo
+from rpython.jit.metainterp.optimizeopt import info
 from rpython.jit.metainterp.typesystem import llhelper
 from rpython.rlib.objectmodel import specialize, we_are_translated
 
@@ -293,6 +293,15 @@
             return self.getintbound(op).getnullness()
         xxxx
 
+    def make_constant_class(self, op, class_const):
+        op = self.get_box_replacement(op)
+        opinfo = op.get_forwarded()
+        if opinfo is not None:
+            return opinfo
+        opinfo = info.InstancePtrInfo(class_const)
+        op.set_forwarded(opinfo)
+        return opinfo
+
     def getptrinfo(self, op):
         assert op.type == 'r'
         op = self.get_box_replacement(op)
@@ -301,11 +310,9 @@
             xxx
         fw = op.get_forwarded()
         if fw is not None:
-            assert isinstance(fw, PtrOptInfo)
+            assert isinstance(fw, info.PtrInfo)
             return fw
-        ptrinfo = PtrOptInfo()
-        op.set_forwarded(ptrinfo)
-        return ptrinfo
+        return None
 
     def get_box_replacement(self, op):
         return self.optimizer.get_box_replacement(op)
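
Sketch of the new lookup discipline in optimizer.py: getptrinfo() no longer creates an info object lazily but returns None, and make_constant_class() is the place that attaches an InstancePtrInfo. FakeOp and the string class constant are made-up stand-ins; get_box_replacement() and the 'r'-type assert are left out for brevity:

class FakeOp(object):
    _forwarded = None
    def get_forwarded(self):
        return self._forwarded
    def set_forwarded(self, obj):
        self._forwarded = obj

class InstancePtrInfo(object):
    def __init__(self, known_class=None):
        self._known_class = known_class
    def get_known_class(self):
        return self._known_class

def make_constant_class(op, class_const):
    # reuse an already-attached info, otherwise attach a fresh one
    opinfo = op.get_forwarded()
    if opinfo is not None:
        return opinfo
    opinfo = InstancePtrInfo(class_const)
    op.set_forwarded(opinfo)
    return opinfo

def getptrinfo(op):
    # None now means "nothing is known about this pointer yet"
    return op.get_forwarded()

op = FakeOp()
assert getptrinfo(op) is None
opinfo = make_constant_class(op, "ConstClass(Node)")
assert getptrinfo(op) is opinfo
assert opinfo.get_known_class() == "ConstClass(Node)"
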
diff --git a/rpython/jit/metainterp/optimizeopt/rewrite.py b/rpython/jit/metainterp/optimizeopt/rewrite.py
--- a/rpython/jit/metainterp/optimizeopt/rewrite.py
+++ b/rpython/jit/metainterp/optimizeopt/rewrite.py
@@ -7,7 +7,6 @@
 from rpython.jit.metainterp.optimizeopt.intutils import IntBound
 from rpython.jit.metainterp.optimizeopt.optimizer import (Optimization, REMOVED,
     CONST_0, CONST_1, INFO_NONNULL, INFO_NULL)
-from rpython.jit.metainterp.optimizeopt.info import PtrOptInfo
 from rpython.jit.metainterp.optimizeopt.util import _findall, make_dispatcher_method
 from rpython.jit.metainterp.resoperation import rop, ResOperation, opclasses,\
      OpHelpers
@@ -347,41 +346,43 @@
         value.make_constant_class(None, expectedclassbox)
 
     def optimize_GUARD_CLASS(self, op):
-        value = self.getptrinfo(op.getarg(0))
         expectedclassbox = op.getarg(1)
+        info = self.getptrinfo(op.getarg(0))
         assert isinstance(expectedclassbox, Const)
-        realclassbox = value.get_constant_class(self.optimizer.cpu)
-        if realclassbox is not None:
-            if realclassbox.same_constant(expectedclassbox):
-                return
-            r = self.optimizer.metainterp_sd.logger_ops.repr_of_resop(op)
-            raise InvalidLoop('A GUARD_CLASS (%s) was proven to always fail'
-                              % r)
-        old_guard_op = value.get_last_guard(self.optimizer)
-        if old_guard_op and not isinstance(old_guard_op.getdescr(),
-                                           compile.ResumeAtPositionDescr):
-            # there already has been a guard_nonnull or guard_class or
-            # guard_nonnull_class on this value.
-            if old_guard_op.getopnum() == rop.GUARD_NONNULL:
-                # it was a guard_nonnull, which we replace with a
-                # guard_nonnull_class.
-                descr = compile.ResumeGuardNonnullClassDescr()
-                op = old_guard_op.copy_and_change (rop.GUARD_NONNULL_CLASS,
-                            args = [old_guard_op.getarg(0), op.getarg(1)],
-                            descr=descr)
-                # Note: we give explicitly a new descr for 'op'; this is why the
-                # old descr must not be ResumeAtPositionDescr (checked above).
-                # Better-safe-than-sorry but it should never occur: we should
-                # not put in short preambles guard_nonnull and guard_class
-                # on the same box.
-                self.optimizer.replace_guard(op, value)
-                # not emitting the guard, so we have to pass None to
-                # make_constant_class, so last_guard_pos is not updated
-                self.emit_operation(op)
-                value.make_constant_class(None, expectedclassbox)
-                return
+        if info is not None:
+            realclassbox = info.get_known_class()
+            if realclassbox is not None:
+                if realclassbox.same_constant(expectedclassbox):
+                    return
+                r = self.optimizer.metainterp_sd.logger_ops.repr_of_resop(op)
+                raise InvalidLoop('A GUARD_CLASS (%s) was proven to always fail'
+                                  % r)
+            old_guard_op = info.get_last_guard(self.optimizer)
+            if old_guard_op and not isinstance(old_guard_op.getdescr(),
+                                               compile.ResumeAtPositionDescr):
+                xxx
+                # there already has been a guard_nonnull or guard_class or
+                # guard_nonnull_class on this value.
+                if old_guard_op.getopnum() == rop.GUARD_NONNULL:
+                    # it was a guard_nonnull, which we replace with a
+                    # guard_nonnull_class.
+                    descr = compile.ResumeGuardNonnullClassDescr()
+                    op = old_guard_op.copy_and_change (rop.GUARD_NONNULL_CLASS,
+                                args = [old_guard_op.getarg(0), op.getarg(1)],
+                                descr=descr)
+                    # Note: we give explicitly a new descr for 'op'; this is why the
+                    # old descr must not be ResumeAtPositionDescr (checked above).
+                    # Better-safe-than-sorry but it should never occur: we should
+                    # not put in short preambles guard_nonnull and guard_class
+                    # on the same box.
+                    self.optimizer.replace_guard(op, value)
+                    # not emitting the guard, so we have to pass None to
+                    # make_constant_class, so last_guard_pos is not updated
+                    self.emit_operation(op)
+                    value.make_constant_class(None, expectedclassbox)
+                    return
+        self.make_constant_class(op.getarg(0), expectedclassbox)
         self.emit_operation(op)
-        value.make_constant_class(self.optimizer, expectedclassbox)
 
     def optimize_GUARD_NONNULL_CLASS(self, op):
         value = self.getvalue(op.getarg(0))
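
The rewritten optimize_GUARD_CLASS boils down to three outcomes, condensed below with hypothetical emit/record callbacks standing in for emit_operation() and make_constant_class(); the old-guard-strengthening branch, still marked xxx in the patch, is omitted:

class InvalidLoop(Exception):
    # stand-in for rpython.jit.metainterp.optimize.InvalidLoop
    pass

def optimize_guard_class(known_class, expected_class, emit, record):
    if known_class is not None:
        if known_class == expected_class:
            return                       # class already proven: drop the guard
        raise InvalidLoop("GUARD_CLASS proven to always fail")
    record(expected_class)               # remember the class for later guards
    emit()                               # keep the guard in the trace

emitted = []
recorded = []
optimize_guard_class(None, "Node",
                     emit=lambda: emitted.append("guard_class"),
                     record=recorded.append)
assert emitted == ["guard_class"] and recorded == ["Node"]
optimize_guard_class("Node", "Node",
                     emit=lambda: emitted.append("guard_class"),
                     record=recorded.append)
assert emitted == ["guard_class"]        # the redundant guard was not re-emitted
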
diff --git a/rpython/jit/metainterp/optimizeopt/virtualize.py b/rpython/jit/metainterp/optimizeopt/virtualize.py
--- a/rpython/jit/metainterp/optimizeopt/virtualize.py
+++ b/rpython/jit/metainterp/optimizeopt/virtualize.py
@@ -13,7 +13,7 @@
 from rpython.rlib.objectmodel import we_are_translated, specialize
 from rpython.jit.metainterp.optimizeopt.intutils import IntUnbounded
 
-class AbstractVirtualInfo(info.PtrOptInfo):
+class AbstractVirtualInfo(info.PtrInfo):
     _attrs_ = ('_cached_vinfo',)
     _tag = info.LEVEL_NONNULL
     is_about_raw = False
@@ -22,14 +22,6 @@
     def is_forced_virtual(self):
         xxx
         return self.box is not None
-
-    def force_box(self, op, optforce):
-        op.set_forwarded(None)
-        optforce.emit_operation(op)
-        newop = optforce.getlastop()
-        op.set_forwarded(newop)
-        optforce.getptrinfo(newop).make_constant_class(None, self.known_class)
-        return newop
     
     #def force_box(self, optforce):
     #    xxxx
@@ -531,9 +523,9 @@
     _last_guard_not_forced_2 = None
 
     def make_virtual(self, known_class, source_op, descr):
-        info = VirtualInfo(known_class, descr)
-        source_op.set_forwarded(info)
-        return info
+        opinfo = info.InstancePtrInfo(known_class, is_virtual=True)
+        source_op.set_forwarded(opinfo)
+        return opinfo
 
     def make_varray(self, arraydescr, size, source_op, clear=False):
         if arraydescr.is_array_of_structs():
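
With the dedicated VirtualInfo class gone, a virtual instance is now just an InstancePtrInfo flagged is_virtual=True and forwarded from its source operation. A tiny self-contained sketch; FakeOp is a made-up stand-in and the descr argument is dropped here:

class FakeOp(object):
    _forwarded = None
    def set_forwarded(self, obj):
        self._forwarded = obj
    def get_forwarded(self):
        return self._forwarded

class InstancePtrInfo(object):
    def __init__(self, known_class=None, is_virtual=False):
        self._known_class = known_class
        self._is_virtual = is_virtual

def make_virtual(known_class, source_op):
    # mirrors the new make_virtual(): the virtual state is just an
    # InstancePtrInfo with is_virtual=True, hung off the source op
    opinfo = InstancePtrInfo(known_class, is_virtual=True)
    source_op.set_forwarded(opinfo)
    return opinfo

source_op = FakeOp()
v = make_virtual("ConstClass(Node)", source_op)
assert source_op.get_forwarded() is v
assert v._is_virtual
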
_______________________________________________
pypy-commit mailing list
[email protected]
https://mail.python.org/mailman/listinfo/pypy-commit
