We need to rework permission type mapping to nodesets, which means we
need to move the nodeset computations earlier in the DFA creation
process, instead of doing them as a post step of follow(); so move the
nodeset into the expr-tree.

Signed-off-by: John Johansen <[email protected]>
Acked-by: Steve Beattie <[email protected]>
Acked-by: Seth Arnold <[email protected]>

---
 parser/libapparmor_re/expr-tree.h |  176 ++++++++++++++++++++++++++++++++++++++
 parser/libapparmor_re/hfa.h       |  175 -------------------------------------
 2 files changed, 176 insertions(+), 175 deletions(-)

--- 2.9-test.orig/parser/libapparmor_re/expr-tree.h
+++ 2.9-test/parser/libapparmor_re/expr-tree.h
@@ -607,4 +607,180 @@
        DenyMatchFlag(uint32_t flag, uint32_t quiet): MatchFlag(flag, quiet) {}
 };
 
+
+/*
+ * hashedNodes - for efficient set comparison
+ */
+class hashedNodeSet {
+public:
+       unsigned long hash;
+       NodeSet *nodes;
+
+       hashedNodeSet(NodeSet *n): nodes(n)
+       {
+               hash = hash_NodeSet(n);
+       }
+
+       bool operator<(hashedNodeSet const &rhs)const
+       {
+               if (hash == rhs.hash) {
+                       if (nodes->size() == rhs.nodes->size())
+                               return *nodes < *(rhs.nodes);
+                       else
+                               return nodes->size() < rhs.nodes->size();
+               } else {
+                       return hash < rhs.hash;
+               }
+       }
+};
+
+
+class hashedNodeVec {
+public:
+       typedef ImportantNode ** iterator;
+       iterator begin() { return nodes; }
+       iterator end() { iterator t = nodes ? &nodes[len] : NULL; return t; }
+
+       unsigned long hash;
+       unsigned long len;
+       ImportantNode **nodes;
+
+       hashedNodeVec(NodeSet *n)
+       {
+               hash = hash_NodeSet(n);
+               len = n->size();
+               nodes = new ImportantNode *[n->size()];
+
+               unsigned int j = 0;
+               for (NodeSet::iterator i = n->begin(); i != n->end(); i++, j++) {
+                       nodes[j] = *i;
+               }
+       }
+
+       hashedNodeVec(NodeSet *n, unsigned long h): hash(h)
+       {
+               len = n->size();
+               nodes = new ImportantNode *[n->size()];
+               ImportantNode **j = nodes;
+               for (NodeSet::iterator i = n->begin(); i != n->end(); i++) {
+                       *(j++) = *i;
+               }
+       }
+
+       ~hashedNodeVec()
+       {
+               delete nodes;
+       }
+
+       unsigned long size()const { return len; }
+
+       bool operator<(hashedNodeVec const &rhs)const
+       {
+               if (hash == rhs.hash) {
+                       if (len == rhs.size()) {
+                               for (unsigned int i = 0; i < len; i++) {
+                                       if (nodes[i] != rhs.nodes[i])
+                                               return nodes[i] < rhs.nodes[i];
+                               }
+                               return false;
+                       }
+                       return len < rhs.size();
+               }
+               return hash < rhs.hash;
+       }
+};
+
+class CacheStats {
+public:
+       unsigned long dup, sum, max;
+
+       CacheStats(void): dup(0), sum(0), max(0) { };
+
+       void clear(void) { dup = sum = max = 0; }
+       virtual unsigned long size(void) const = 0;
+};
+
+class NodeCache: public CacheStats {
+public:
+       set<hashedNodeSet> cache;
+
+       NodeCache(void): cache() { };
+       ~NodeCache() { clear(); };
+
+       virtual unsigned long size(void) const { return cache.size(); }
+
+       void clear()
+       {
+               for (set<hashedNodeSet>::iterator i = cache.begin();
+                    i != cache.end(); i++) {
+                       delete i->nodes;
+               }
+               cache.clear();
+               CacheStats::clear();
+       }
+
+       NodeSet *insert(NodeSet *nodes)
+       {
+               if (!nodes)
+                       return NULL;
+               pair<set<hashedNodeSet>::iterator,bool> uniq;
+               uniq = cache.insert(hashedNodeSet(nodes));
+               if (uniq.second == false) {
+                       delete(nodes);
+                       dup++;
+               } else {
+                       sum += nodes->size();
+                       if (nodes->size() > max)
+                               max = nodes->size();
+               }
+               return uniq.first->nodes;
+       }
+};
+
+struct deref_less_than {
+       bool operator()(hashedNodeVec * const &lhs, hashedNodeVec * const &rhs)const
+               {
+                       return *lhs < *rhs;
+               }
+};
+
+class NodeVecCache: public CacheStats {
+public:
+       set<hashedNodeVec *, deref_less_than> cache;
+
+       NodeVecCache(void): cache() { };
+       ~NodeVecCache() { clear(); };
+
+       virtual unsigned long size(void) const { return cache.size(); }
+
+       void clear()
+       {
+               for (set<hashedNodeVec *>::iterator i = cache.begin();
+                    i != cache.end(); i++) {
+                       delete *i;
+               }
+               cache.clear();
+               CacheStats::clear();
+       }
+
+       hashedNodeVec *insert(NodeSet *nodes)
+       {
+               if (!nodes)
+                       return NULL;
+               pair<set<hashedNodeVec *>::iterator,bool> uniq;
+               hashedNodeVec *nv = new hashedNodeVec(nodes);
+               uniq = cache.insert(nv);
+               if (uniq.second == false) {
+                       delete nv;
+                       dup++;
+               } else {
+                       sum += nodes->size();
+                       if (nodes->size() > max)
+                               max = nodes->size();
+               }
+               delete(nodes);
+               return (*uniq.first);
+       }
+};
+
 #endif /* __LIBAA_RE_EXPR */
--- 2.9-test.orig/parser/libapparmor_re/hfa.h
+++ 2.9-test/parser/libapparmor_re/hfa.h
@@ -131,181 +131,6 @@
 int accept_perms(NodeSet *state, perms_t &perms);
 
 /*
- * hashedNodes - for efficient set comparison
- */
-class hashedNodeSet {
-public:
-       unsigned long hash;
-       NodeSet *nodes;
-
-       hashedNodeSet(NodeSet *n): nodes(n)
-       {
-               hash = hash_NodeSet(n);
-       }
-
-       bool operator<(hashedNodeSet const &rhs)const
-       {
-               if (hash == rhs.hash) {
-                       if (nodes->size() == rhs.nodes->size())
-                               return *nodes < *(rhs.nodes);
-                       else
-                               return nodes->size() < rhs.nodes->size();
-               } else {
-                       return hash < rhs.hash;
-               }
-       }
-};
-
-
-class hashedNodeVec {
-public:
-       typedef ImportantNode ** iterator;
-       iterator begin() { return nodes; }
-       iterator end() { iterator t = nodes ? &nodes[len] : NULL; return t; }
-
-       unsigned long hash;
-       unsigned long len;
-       ImportantNode **nodes;
-
-       hashedNodeVec(NodeSet *n)
-       {
-               hash = hash_NodeSet(n);
-               len = n->size();
-               nodes = new ImportantNode *[n->size()];
-
-               unsigned int j = 0;
-               for (NodeSet::iterator i = n->begin(); i != n->end(); i++, j++) {
-                       nodes[j] = *i;
-               }
-       }
-
-       hashedNodeVec(NodeSet *n, unsigned long h): hash(h)
-       {
-               len = n->size();
-               nodes = new ImportantNode *[n->size()];
-               ImportantNode **j = nodes;
-               for (NodeSet::iterator i = n->begin(); i != n->end(); i++) {
-                       *(j++) = *i;
-               }
-       }
-
-       ~hashedNodeVec()
-       {
-               delete nodes;
-       }
-
-       unsigned long size()const { return len; }
-
-       bool operator<(hashedNodeVec const &rhs)const
-       {
-               if (hash == rhs.hash) {
-                       if (len == rhs.size()) {
-                               for (unsigned int i = 0; i < len; i++) {
-                                       if (nodes[i] != rhs.nodes[i])
-                                               return nodes[i] < rhs.nodes[i];
-                               }
-                               return false;
-                       }
-                       return len < rhs.size();
-               }
-               return hash < rhs.hash;
-       }
-};
-
-class CacheStats {
-public:
-       unsigned long dup, sum, max;
-
-       CacheStats(void): dup(0), sum(0), max(0) { };
-
-       void clear(void) { dup = sum = max = 0; }
-       virtual unsigned long size(void) const = 0;
-};
-
-class NodeCache: public CacheStats {
-public:
-       set<hashedNodeSet> cache;
-
-       NodeCache(void): cache() { };
-       ~NodeCache() { clear(); };
-
-       virtual unsigned long size(void) const { return cache.size(); }
-
-       void clear()
-       {
-               for (set<hashedNodeSet>::iterator i = cache.begin();
-                    i != cache.end(); i++) {
-                       delete i->nodes;
-               }
-               cache.clear();
-               CacheStats::clear();
-       }
-
-       NodeSet *insert(NodeSet *nodes)
-       {
-               if (!nodes)
-                       return NULL;
-               pair<set<hashedNodeSet>::iterator,bool> uniq;
-               uniq = cache.insert(hashedNodeSet(nodes));
-               if (uniq.second == false) {
-                       delete(nodes);
-                       dup++;
-               } else {
-                       sum += nodes->size();
-                       if (nodes->size() > max)
-                               max = nodes->size();
-               }
-               return uniq.first->nodes;
-       }
-};
-
-struct deref_less_than {
-       bool operator()(hashedNodeVec * const &lhs, hashedNodeVec * const &rhs)const
-               {
-                       return *lhs < *rhs;
-               }
-};
-
-class NodeVecCache: public CacheStats {
-public:
-       set<hashedNodeVec *, deref_less_than> cache;
-
-       NodeVecCache(void): cache() { };
-       ~NodeVecCache() { clear(); };
-
-       virtual unsigned long size(void) const { return cache.size(); }
-
-       void clear()
-       {
-               for (set<hashedNodeVec *>::iterator i = cache.begin();
-                    i != cache.end(); i++) {
-                       delete *i;
-               }
-               cache.clear();
-               CacheStats::clear();
-       }
-
-       hashedNodeVec *insert(NodeSet *nodes)
-       {
-               if (!nodes)
-                       return NULL;
-               pair<set<hashedNodeVec *>::iterator,bool> uniq;
-               hashedNodeVec *nv = new hashedNodeVec(nodes);
-               uniq = cache.insert(nv);
-               if (uniq.second == false) {
-                       delete nv;
-                       dup++;
-               } else {
-                       sum += nodes->size();
-                       if (nodes->size() > max)
-                               max = nodes->size();
-               }
-               delete(nodes);
-               return (*uniq.first);
-       }
-};
-
-/*
  * ProtoState - NodeSet and ancillery information used to create a state
  */
 class ProtoState {


-- 
AppArmor mailing list
[email protected]
Modify settings or unsubscribe at: 
https://lists.ubuntu.com/mailman/listinfo/apparmor

Reply via email to