This is an automated email from the ASF dual-hosted git repository.

sebb pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-ponymail.git


The following commit(s) were added to refs/heads/master by this push:
     new 1ec97ee  Bug: unnecessary test (will always succeed) in copy-list.py
1ec97ee is described below

commit 1ec97ee3032f0e7908a83791355d270bedeb62dd
Author: Sebb <s...@apache.org>
AuthorDate: Thu May 17 13:32:30 2018 +0100

    Bug: unnecessary test (will always succeed) in copy-list.py
    
    This fixes #451
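
The removed guard is "if targetLID or newdb:" wrapped around the whole copy loop. A minimal sketch of why a guard like that can never be false at this point, assuming (as the commit title implies) that the script has already bailed out when neither a target list ID nor a new database was supplied; the flag names and messages below are illustrative only, not the real copy-list.py options:

    # Hedged sketch, not the actual copy-list.py argument handling: the point of
    # #451 is that execution only reaches the copy loop once targetLID or newdb
    # is known to be set, so a later "if targetLID or newdb:" is always true.
    import argparse
    import sys

    parser = argparse.ArgumentParser(description="copy/rename a list (illustrative)")
    parser.add_argument('--target', dest='targetLID', help='target list ID (hypothetical flag)')
    parser.add_argument('--newdb', dest='newdb', help='new database name (hypothetical flag)')
    args = parser.parse_args()

    targetLID = args.targetLID
    newdb = args.newdb

    if not (targetLID or newdb):
        sys.exit("Nothing to do: specify a target list ID or a new database name")

    # From here on, "if targetLID or newdb:" is guaranteed to succeed, which is
    # why the commit drops the test and de-indents the scan/scroll loop below.
    print("Updating docs...")
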
---
 CHANGELOG.md       |   1 +
 tools/copy-list.py | 115 ++++++++++++++++++++++++++---------------------------
 2 files changed, 58 insertions(+), 58 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 05bb538..f4b544a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,4 +1,5 @@
 ## Changes in 0.11-SNAPSHOT
+- Bug: unnecessary test (will always succeed) in copy-list.py (#451)
 - Bug: archiver ignores failures if dumponfail is not defined (#449)
 - Enh: make MboxoFactory optional (#442)
 - Bug: duplication of data in response from thread.lua (#440)
diff --git a/tools/copy-list.py b/tools/copy-list.py
index 6920063..44ed332 100755
--- a/tools/copy-list.py
+++ b/tools/copy-list.py
@@ -118,71 +118,70 @@ if newdb:
 count = 0
 
     
-if targetLID or newdb:
-    print("Updating docs...")
-    then = time.time()
-    page = es.search(
-        index=dbname,
-        doc_type="mbox",
-        scroll = '30m',
-        search_type = 'scan',
-        size = 100,
-        body = {
-            'query': {
-                'bool': {
-                    'must': [
-                        {
-                            'wildcard' if wildcard else 'term': {
-                                'list_raw': sourceLID
-                            }
+print("Updating docs...")
+then = time.time()
+page = es.search(
+    index=dbname,
+    doc_type="mbox",
+    scroll = '30m',
+    search_type = 'scan',
+    size = 100,
+    body = {
+        'query': {
+            'bool': {
+                'must': [
+                    {
+                        'wildcard' if wildcard else 'term': {
+                            'list_raw': sourceLID
                         }
-                    ]
-                }
+                    }
+                ]
             }
         }
-        )
+    }
+    )
+sid = page['_scroll_id']
+scroll_size = page['hits']['total']
+js_arr = []
+while (scroll_size > 0):
+    page = es.scroll(scroll_id = sid, scroll = '30m')
     sid = page['_scroll_id']
-    scroll_size = page['hits']['total']
-    js_arr = []
-    while (scroll_size > 0):
-        page = es.scroll(scroll_id = sid, scroll = '30m')
-        sid = page['_scroll_id']
-        scroll_size = len(page['hits']['hits'])
-        for hit in page['hits']['hits']:
-            doc = hit['_id']
-            body = es.get(index = dbname, doc_type = 'mbox', id = doc)
-            source = None
-            try:
-                source = es.get(index = dbname, doc_type = 'mbox_source', id = doc)
-            except:
-                print("Source for %s not found, hmm..." % doc)
-            if targetLID:
-                if not newdb:
-                    body['list_raw'] = targetLID
-                    body['list'] = targetLID
+    scroll_size = len(page['hits']['hits'])
+    for hit in page['hits']['hits']:
+        doc = hit['_id']
+        body = es.get(index = dbname, doc_type = 'mbox', id = doc)
+        source = None
+        try:
+            source = es.get(index = dbname, doc_type = 'mbox_source', id = doc)
+        except:
+            print("Source for %s not found, hmm..." % doc)
+        if targetLID:
+            if not newdb:
+                body['list_raw'] = targetLID
+                body['list'] = targetLID
+        js_arr.append({
+            '_op_type': 'index',
+            '_index': newdb if newdb else dbname,
+            '_type': 'mbox',
+            '_id': doc,
+            '_source': body['_source']
+        })
+        if source:
             js_arr.append({
                 '_op_type': 'index',
                 '_index': newdb if newdb else dbname,
-                '_type': 'mbox',
+                '_type': 'mbox_source',
                 '_id': doc,
-                '_source': body['_source']
+                '_source': source['_source']
             })
-            if source:
-                js_arr.append({
-                    '_op_type': 'index',
-                    '_index': newdb if newdb else dbname,
-                    '_type': 'mbox_source',
-                    '_id': doc,
-                    '_source': source['_source']
-                })
+        
+        count += 1
+        if (count % 50 == 0):
+            print("Processed %u emails..." % count)
+            helpers.bulk(es, js_arr)
+            js_arr = []
+
+if len(js_arr) > 0:
+    helpers.bulk(es, js_arr)
             
-            count += 1
-            if (count % 50 == 0):
-                print("Processed %u emails..." % count)
-                helpers.bulk(es, js_arr)
-                js_arr = []
-    
-    if len(js_arr) > 0:
-        helpers.bulk(es, js_arr)
-                
-    print("All done, processed %u docs in %u seconds" % (count, time.time() - 
then))
+print("All done, processed %u docs in %u seconds" % (count, time.time() - 
then))
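
For context, the loop de-indented by this commit follows the usual scan/scroll plus bulk pattern of the Python elasticsearch client. A minimal, self-contained sketch under assumed placeholder names (host, index, document type and query are not Pony Mail's real values, and search_type='scan' matches the older client API that copy-list.py itself uses):

    # Hedged sketch of the scan/scroll + periodic bulk flush used in copy-list.py;
    # index/type names and the query are placeholders, not Pony Mail's real ones.
    from elasticsearch import Elasticsearch, helpers

    es = Elasticsearch(['localhost:9200'])

    page = es.search(index='source-index', doc_type='mbox',
                     scroll='30m', search_type='scan', size=100,
                     body={'query': {'match_all': {}}})
    sid = page['_scroll_id']
    scroll_size = page['hits']['total']

    actions = []
    count = 0
    while scroll_size > 0:
        page = es.scroll(scroll_id=sid, scroll='30m')
        sid = page['_scroll_id']
        scroll_size = len(page['hits']['hits'])
        for hit in page['hits']['hits']:
            actions.append({'_op_type': 'index', '_index': 'target-index',
                            '_type': 'mbox', '_id': hit['_id'],
                            '_source': hit['_source']})
            count += 1
            if count % 50 == 0:
                helpers.bulk(es, actions)   # flush a batch, as copy-list.py does
                actions = []

    if actions:
        helpers.bulk(es, actions)           # final flush so the last partial batch is indexed

The periodic flush every 50 documents plus the final flush after the loop mirrors the structure the commit keeps, only without the redundant outer test.
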

-- 
To stop receiving notification emails like this one, please contact
s...@apache.org.
