svn commit: r999699 - in /hadoop/pig/branches/branch-0.8: ./ src/org/apache/pig/ src/org/apache/pig/backend/hadoop/executionengine/ src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/
Author: daijy Date: Wed Sep 22 00:48:36 2010 New Revision: 999699 URL: http://svn.apache.org/viewvc?rev=999699view=rev Log: PIG-1598: Pig gobbles up error messages - Part 2 Modified: hadoop/pig/branches/branch-0.8/CHANGES.txt hadoop/pig/branches/branch-0.8/src/org/apache/pig/PigServer.java hadoop/pig/branches/branch-0.8/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java hadoop/pig/branches/branch-0.8/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java Modified: hadoop/pig/branches/branch-0.8/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.8/CHANGES.txt?rev=999699r1=999698r2=999699view=diff == --- hadoop/pig/branches/branch-0.8/CHANGES.txt (original) +++ hadoop/pig/branches/branch-0.8/CHANGES.txt Wed Sep 22 00:48:36 2010 @@ -192,6 +192,9 @@ PIG-1353: Map-side joins (ashutoshc) PIG-1309: Map-side Cogroup (ashutoshc) BUG FIXES + +PIG-1598: Pig gobbles up error messages - Part 2 (nrai via daijy) + PIG-1616: 'union onschema' does not use create output with correct schema when udfs are involved (thejas) Modified: hadoop/pig/branches/branch-0.8/src/org/apache/pig/PigServer.java URL: http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.8/src/org/apache/pig/PigServer.java?rev=999699r1=999698r2=999699view=diff == --- hadoop/pig/branches/branch-0.8/src/org/apache/pig/PigServer.java (original) +++ hadoop/pig/branches/branch-0.8/src/org/apache/pig/PigServer.java Wed Sep 22 00:48:36 2010 @@ -1189,7 +1189,7 @@ public class PigServer { return executeCompiledLogicalPlan(typeCheckedLp); } -private PigStats executeCompiledLogicalPlan(LogicalPlan compiledLp) throws ExecException { +private PigStats executeCompiledLogicalPlan(LogicalPlan compiledLp) throws ExecException, FrontendException { // discover pig features used in this script ScriptState.get().setScriptFeatures(compiledLp); PhysicalPlan pp = compilePp(compiledLp); Modified: 
hadoop/pig/branches/branch-0.8/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java URL: http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.8/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java?rev=999699r1=999698r2=999699view=diff == --- hadoop/pig/branches/branch-0.8/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java (original) +++ hadoop/pig/branches/branch-0.8/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java Wed Sep 22 00:48:36 2010 @@ -287,7 +287,7 @@ public class HExecutionEngine { } public ListExecJob execute(PhysicalPlan plan, - String jobName) throws ExecException { + String jobName) throws ExecException, FrontendException { MapReduceLauncher launcher = new MapReduceLauncher(); ListExecJob jobs = new ArrayListExecJob(); @@ -319,8 +319,11 @@ public class HExecutionEngine { } catch (Exception e) { // There are a lot of exceptions thrown by the launcher. If this // is an ExecException, just let it through. Else wrap it. 
-if (e instanceof ExecException) throw (ExecException)e; -else { +if (e instanceof ExecException){ + throw (ExecException)e; +} else if (e instanceof FrontendException) { + throw (FrontendException)e; +} else { int errCode = 2043; String msg = Unexpected error during execution.; throw new ExecException(msg, errCode, PigException.BUG, e); Modified: hadoop/pig/branches/branch-0.8/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java URL: http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.8/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java?rev=999699r1=999698r2=999699view=diff == --- hadoop/pig/branches/branch-0.8/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java (original) +++ hadoop/pig/branches/branch-0.8/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java Wed Sep 22 00:48:36 2010 @@ -1052,7 +1052,8 @@ public class MRCompiler extends PhyPlanV LoadFunc loadFunc = ((POLoad)phyOp).getLoadFunc(); try { if(!(CollectableLoadFunc.class.isAssignableFrom(loadFunc.getClass({ -throw new MRCompilerException(While using 'collected' on group; data must be loaded via loader implementing CollectableLoadFunc.); +int errCode = 2249; +
svn commit: r999703 - in /hadoop/pig/trunk: CHANGES.txt src/org/apache/pig/PigServer.java src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java src/org/apache/pig/backend/hadoop/exec
Author: daijy Date: Wed Sep 22 00:50:52 2010 New Revision: 999703 URL: http://svn.apache.org/viewvc?rev=999703view=rev Log: PIG-1598: Pig gobbles up error messages - Part 2 Modified: hadoop/pig/trunk/CHANGES.txt hadoop/pig/trunk/src/org/apache/pig/PigServer.java hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java Modified: hadoop/pig/trunk/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/CHANGES.txt?rev=999703r1=999702r2=999703view=diff == --- hadoop/pig/trunk/CHANGES.txt (original) +++ hadoop/pig/trunk/CHANGES.txt Wed Sep 22 00:50:52 2010 @@ -204,6 +204,8 @@ PIG-1309: Map-side Cogroup (ashutoshc) BUG FIXES +PIG-1598: Pig gobbles up error messages - Part 2 (nrai via daijy) + PIG-1616: 'union onschema' does not use create output with correct schema when udfs are involved (thejas) Modified: hadoop/pig/trunk/src/org/apache/pig/PigServer.java URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/PigServer.java?rev=999703r1=999702r2=999703view=diff == --- hadoop/pig/trunk/src/org/apache/pig/PigServer.java (original) +++ hadoop/pig/trunk/src/org/apache/pig/PigServer.java Wed Sep 22 00:50:52 2010 @@ -1189,7 +1189,7 @@ public class PigServer { return executeCompiledLogicalPlan(typeCheckedLp); } -private PigStats executeCompiledLogicalPlan(LogicalPlan compiledLp) throws ExecException { +private PigStats executeCompiledLogicalPlan(LogicalPlan compiledLp) throws ExecException, FrontendException { // discover pig features used in this script ScriptState.get().setScriptFeatures(compiledLp); PhysicalPlan pp = compilePp(compiledLp); Modified: hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java?rev=999703r1=999702r2=999703view=diff == --- 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java (original) +++ hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java Wed Sep 22 00:50:52 2010 @@ -287,7 +287,7 @@ public class HExecutionEngine { } public ListExecJob execute(PhysicalPlan plan, - String jobName) throws ExecException { + String jobName) throws ExecException, FrontendException { MapReduceLauncher launcher = new MapReduceLauncher(); ListExecJob jobs = new ArrayListExecJob(); @@ -319,8 +319,11 @@ public class HExecutionEngine { } catch (Exception e) { // There are a lot of exceptions thrown by the launcher. If this // is an ExecException, just let it through. Else wrap it. -if (e instanceof ExecException) throw (ExecException)e; -else { +if (e instanceof ExecException){ + throw (ExecException)e; +} else if (e instanceof FrontendException) { + throw (FrontendException)e; +} else { int errCode = 2043; String msg = Unexpected error during execution.; throw new ExecException(msg, errCode, PigException.BUG, e); Modified: hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java?rev=999703r1=999702r2=999703view=diff == --- hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java (original) +++ hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java Wed Sep 22 00:50:52 2010 @@ -1052,7 +1052,8 @@ public class MRCompiler extends PhyPlanV LoadFunc loadFunc = ((POLoad)phyOp).getLoadFunc(); try { if(!(CollectableLoadFunc.class.isAssignableFrom(loadFunc.getClass({ -throw new MRCompilerException(While using 'collected' on group; data must be loaded via loader implementing CollectableLoadFunc.); +int errCode = 2249; +throw new MRCompilerException(While using 'collected' on group; data must be 
loaded via loader implementing CollectableLoadFunc., errCode); } ((CollectableLoadFunc)loadFunc).ensureAllKeyInstancesInSameSplit(); } catch (MRCompilerException e){ @@ -1077,8 +1078,9
[Pig Wiki] Update of "SemanticsCleanup" by AlanGates
Dear Wiki user, You have subscribed to a wiki page or wiki category on Pig Wiki for change notification. The SemanticsCleanup page has been changed by AlanGates. http://wiki.apache.org/pig/SemanticsCleanup?action=diffrev1=2rev2=3 -- || [[https://issues.apache.org/jira/browse/PIG-1584|PIG-1584]] || Grammar || Cogroup inner does not match the semantics of inner join. It is also not clear what value the inner keyword has for cogroup. Consider removing it. || || || [[https://issues.apache.org/jira/browse/PIG-1538|PIG-1538]] || Nested types || Remove two level access || Maybe, if we can find a way to ignore calls to Schema.isTwoLevelAccessRequired(). || || [[https://issues.apache.org/jira/browse/PIG-1536|PIG-1536]] || Schema || Pick one semantic for schema merges and use it consistently throughout Pig || no || + || [[https://issues.apache.org/jira/browse/PIG-1371|PIG-1371]] || Nested types || unknown || || || [[https://issues.apache.org/jira/browse/PIG-1341|PIG-1341]] || Dynamic type binding || Close as won't fix || yes || || [[https://issues.apache.org/jira/browse/PIG-1281|PIG-1281]] || Dynamic type binding || In situations where a Hadoop shuffle key is assumed to be of type bytearray wrap the value in a tuple so that if the type is actually something else Hadoop can still process it. || yes || || [[https://issues.apache.org/jira/browse/PIG-1277|PIG-1277]] || Nested types || Unknown || || + || [[https://issues.apache.org/jira/browse/PIG-1222|PIG-1222]] || Dynamic type binding || The issue here is that Pig thinks the field is a bytearray while BinStorage actually produces a String. Need a way to handle these issues on the fly. || || || [[https://issues.apache.org/jira/browse/PIG-1188|PIG-1188]] || Schema || Make sure Pig handles missing data in Tuples by returning a null rather than failing. 
|| yes || || [[https://issues.apache.org/jira/browse/PIG-1112|PIG-1112]] || Schema || When user provides AS to flatten of undefined bag or tuple, the contents of that AS are taken to be the schema of the bag or tuple. || yes || || [[https://issues.apache.org/jira/browse/PIG-1065|PIG-1065]] || Dynamic type binding || In situations where a Hadoop shuffle key is assumed to be of type bytearray wrap the value in a tuple so that if the type is actually something else Hadoop can still process it. || yes || || [[https://issues.apache.org/jira/browse/PIG-999|PIG-999]] || Dynamic type binding || In situations where a Hadoop shuffle key is assumed to be of type bytearray wrap the value in a tuple so that if the type is actually something else Hadoop can still process it. || yes || + || [[https://issues.apache.org/jira/browse/PIG-847|PIG-847]] || Nested types || Remove two level access || maybe || + || [[https://issues.apache.org/jira/browse/PIG-828|PIG-828]] || Nested types || According to the rules of Pig Latin, this should produce a bag with one field. Need to make sure that is what Pig is trying to do in this case. || yes || || [[https://issues.apache.org/jira/browse/PIG-767|PIG-767]] || Nested types || Remove two level access; bring DUMP and DESCRIBE output into sync. || no || + || [[https://issues.apache.org/jira/browse/PIG-749|PIG-749]] || Schema || Related to PIG-1112 || yes || || [[https://issues.apache.org/jira/browse/PIG-730|PIG-730]] || Nested types || Make sure schema of union is the same as schema before union (suspect his is a two level access issue) || unclear || || [[https://issues.apache.org/jira/browse/PIG-723|PIG-723]] || Nested types || Suspect this is a two level access issue || unclear || || [[https://issues.apache.org/jira/browse/PIG-696|PIG-696]] || Dynamic type binding || Class cast exceptions such as this should result in a null value and a warning, not a failure. 
|| yes || || [[https://issues.apache.org/jira/browse/PIG-694|PIG-694]] || Nested types || Determine the semantics for merging tuples and bags. || unclear || + || [[https://issues.apache.org/jira/browse/PIG-678|PIG-678]] || Grammar || Decide whether we want to support this extension. || yes || || [[https://issues.apache.org/jira/browse/PIG-621|PIG-621]] || Dynamic type binding || Class cast exceptions such as this should result in a null value and a warning, not a failure. || yes || || [[https://issues.apache.org/jira/browse/PIG-435|PIG-435]] || Schema || Decide definitely on what it means when users declare a schema for a load. || unclear || || [[https://issues.apache.org/jira/browse/PIG-333|PIG-333]] || Dynamic type binding || Since it is specified that MIN and MAX treat unknown types as double, all the actual string data should be converted to NULLs, rather than cause errors. || yes || || [[https://issues.apache.org/jira/browse/PIG-313|PIG-313]] || Grammar || I propose that we continue not supporting this. But we should detect it at compile time rather than at runtime. || yes || + + Bugs I need to add
svn commit: r999765 [2/2] - in /hadoop/pig/branches/branch-0.8: ./ src/org/apache/pig/ src/org/apache/pig/backend/hadoop/executionengine/ src/org/apache/pig/backend/hadoop/executionengine/mapReduceLay
Modified: hadoop/pig/branches/branch-0.8/test/org/apache/pig/test/TestFRJoin2.java URL: http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.8/test/org/apache/pig/test/TestFRJoin2.java?rev=999765r1=999764r2=999765view=diff == --- hadoop/pig/branches/branch-0.8/test/org/apache/pig/test/TestFRJoin2.java (original) +++ hadoop/pig/branches/branch-0.8/test/org/apache/pig/test/TestFRJoin2.java Wed Sep 22 05:43:05 2010 @@ -178,7 +178,6 @@ public class TestFRJoin2 { // test scalar alias with file concatenation following // a multi-query job -/* @Test public void testConcatenateJobForScalar3() throws Exception { PigServer pigServer = new PigServer(ExecType.MAPREDUCE, cluster @@ -221,7 +220,7 @@ public class TestFRJoin2 { assertEquals(dbfrj.size(), dbshj.size()); assertEquals(true, TestHelper.compareBags(dbfrj, dbshj)); -}*/ +} @Test public void testConcatenateJobForFRJoin() throws Exception { Modified: hadoop/pig/branches/branch-0.8/test/org/apache/pig/test/data/GoldenFiles/MRC18.gld URL: http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.8/test/org/apache/pig/test/data/GoldenFiles/MRC18.gld?rev=999765r1=999764r2=999765view=diff == --- hadoop/pig/branches/branch-0.8/test/org/apache/pig/test/data/GoldenFiles/MRC18.gld (original) +++ hadoop/pig/branches/branch-0.8/test/org/apache/pig/test/data/GoldenFiles/MRC18.gld Wed Sep 22 05:43:05 2010 @@ -18,4 +18,4 @@ Reduce Plan Empty | | | | | Project[tuple][*] - scope-126 | | -| |---b: 
Load(/tmp/input2:org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MergeJoinIndexer('org.apache.pig.builtin.PigStorage','kmonaaafhdhcaabdgkgbhggbcohfhegjgmcoebhchcgbhjemgjhdhehiibncbnjjmhgbjnadaaabejaaaehdgjhkgfhihbhhaeaaabhdhcaaeogphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccohagmgbgohdcofagihjhdgjgdgbgmfagmgbgoaaabacaaabfkaaangfgogeepggebgmgmejgohahfhehihcaacfgphcghcogbhagbgdgigfcohagjghcogjgnhagmcohagmgbgocoephagfhcgbhegphcfagmgbgogpkikkjgaddcgofpacaaagemaaakgneghcgpgnefgeghgfhdheaacdemgphcghcpgbhagbgdgigfcphagjghcpgjgnhagmcphfhegjgmcpenhfgmhegjengbhadlemaaafgnelgfhjhdheaaapemgkgbhggbcphfhegjgmcpengbhadlemaaahgnemgfgbhggfhdheaabaemgkgbhggbcphfhegjgmcpemgjhdhedlemaaaegnephahdhbaahoaaafemaaaggnfcgpgphehdhbaahoaaagemaaaignfegpefgeghgfhdhbaahoaaaehihahdhcaacbgphcghcogbhagbgdgigfcohagjghcogjgnhagmcohfhegjgmcoenhfgmhegjengbhcacaaabemaaaegnen gbhahbaahoaaafhihahdhcaabbgkgbhggbcohfhegjgmcoeigbhdgiengbhaafahnkmbmdbgganbadaaacegaaakgmgpgbgeeggbgdhegphcejaaajhegihcgfhdgigpgmgehihadpemhhaiaabahihdhbaahoaaakdpemhhaiaabbhdhcaacegphcghcogbhagbgdgigfcohagjghcogjgnhagmcohagmgbgocoephagfhcgbhegphcelgfhjaaabacaaacekaaacgjgeemaaafhdgdgphagfheaabcemgkgbhggbcpgmgbgoghcpfdhehcgjgoghdlhihaaahhheaaafhdgdgphagfhdhcaafjgphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccogfhihahcgfhdhdgjgpgoephagfhcgbhegphchdcofaepfahcgpgkgfgdheaaabacaaaffkaaakgphggfhcgmgpgbgegfgefkaabfhahcgpgdgfhdhdgjgoghecgbghepggfehfhagmgfhdfkaabehcgfhdhfgmhefdgjgoghgmgffehfhagmgfecgbghfkaaaehdhegbhcemaaahgdgpgmhfgngohdheaabfemgkgbhggbcphfhegjgmcpebhchcgbhjemgjhdhedlhihcaagcgphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccogfhihahcgfhdhdgjgpgoeph 
agfhcgbhegphchdcoefhihahcgfhdhdgjgpgoephagfhcgbhegphcaaabacaaabemaaadgmgpghheaacaemgphcghcpgbhagbgdgigfcpgdgpgngngpgohdcpgmgpghghgjgoghcpemgpghdlhihcaaemgphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccofagihjhdgjgdgbgmephagfhcgbhegphcaaabacaaamfkaaafgbgdgdhfgnfkaaangjgohahfheebhehegbgdgigfgeejaabehcgfhbhfgfhdhegfgefagbhcgbgmgmgfgmgjhdgnecaaakhcgfhdhfgmhefehjhagfemaaafgbgmgjgbhdhbaahoaaaoemaaafgjgohahfheheaablemgphcghcpgbhagbgdgigfcphagjghcpgegbhegbcpfehfhagmgfdlemaaaggjgohahfhehdhbaahoaaagemaaangmgjgogfgbghgffehcgbgdgfhcheaachemgphcghcpgbhagbgdgigfcphagjghcphagfgocphfhegjgmcpemgjgogfgbghgffehcgbgdgfhcdlemaaadgmgpghhbaahoaabeemaaahgphfhehahfhehdhbaahoaaagemaaakhagbhcgfgohefagmgbgoheaafaemgphcghcpgbhagbgdgigfcphagjghcpgcgbgdglgfgogecpgigbgegpgphacpgfhigfgdhfhegjgpgogfgoghgjgogfcphagihjhdgjgdgbgmemgbhjgfhccphagmgbgohdcpfagihjhdgjgdgbgmfagmgbgodlemaaadhcgfhdheaaeeemgphcghcpgbhagbgdgigf
svn commit: r999767 [2/2] - in /hadoop/pig/trunk: ./ src/org/apache/pig/ src/org/apache/pig/backend/hadoop/executionengine/ src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/ src/org/ap
Modified: hadoop/pig/trunk/test/org/apache/pig/test/data/GoldenFiles/MRC18.gld URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/test/org/apache/pig/test/data/GoldenFiles/MRC18.gld?rev=999767r1=999766r2=999767view=diff == --- hadoop/pig/trunk/test/org/apache/pig/test/data/GoldenFiles/MRC18.gld (original) +++ hadoop/pig/trunk/test/org/apache/pig/test/data/GoldenFiles/MRC18.gld Wed Sep 22 05:45:21 2010 @@ -18,4 +18,4 @@ Reduce Plan Empty | | | | | Project[tuple][*] - scope-126 | | -| |---b: Load(/tmp/input2:org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MergeJoinIndexer('org.apache.pig.builtin.PigStorage','kmonaaafhdhcaabdgkgbhggbcohfhegjgmcoebhchcgbhjemgjhdhehiibncbnjjmhgbjnadaaabejaaaehdgjhkgfhihbhhaeaaabhdhcaaeogphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccohagmgbgohdcofagihjhdgjgdgbgmfagmgbgoaaabacaaabfkaaangfgogeepggebgmgmejgohahfhehihcaacfgphcghcogbhagbgdgigfcohagjghcogjgnhagmcohagmgbgocoephagfhcgbhegphcfagmgbgogpkikkjgaddcgofpacaaagemaaakgneghcgpgnefgeghgfhdheaacdemgphcghcpgbhagbgdgigfcphagjghcpgjgnhagmcphfhegjgmcpenhfgmhegjengbhadlemaaafgnelgfhjhdheaaapemgkgbhggbcphfhegjgmcpengbhadlemaaahgnemgfgbhggfhdheaabaemgkgbhggbcphfhegjgmcpemgjhdhedlemaaaegnephahdhbaahoaaafemaaaggnfcgpgphehdhbaahoaaagemaaaignfegpefgeghgfhdhbaahoaaaehihahdhcaacbgphcghcogbhagbgdgigfcohagjghcogjgnhagmcohfhegjgmcoenhfgmhegjengbhcacaaabemaaaegnen 
gbhahbaahoaaafhihahdhcaabbgkgbhggbcohfhegjgmcoeigbhdgiengbhaafahnkmbmdbgganbadaaacegaaakgmgpgbgeeggbgdhegphcejaaajhegihcgfhdgigpgmgehihadpemhhaiaabahihdhbaahoaaakdpemhhaiaabbhdhcaacegphcghcogbhagbgdgigfcohagjghcogjgnhagmcohagmgbgocoephagfhcgbhegphcelgfhjaaabacaaacekaaacgjgeemaaafhdgdgphagfheaabcemgkgbhggbcpgmgbgoghcpfdhehcgjgoghdlhihaaahhheaaafhdgdgphagfhdhcaafjgphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccogfhihahcgfhdhdgjgpgoephagfhcgbhegphchdcofaepfahcgpgkgfgdheaaabacaaaffkaaakgphggfhcgmgpgbgegfgefkaabfhahcgpgdgfhdhdgjgoghecgbghepggfehfhagmgfhdfkaabehcgfhdhfgmhefdgjgoghgmgffehfhagmgfecgbghfkaaaehdhegbhcemaaahgdgpgmhfgngohdheaabfemgkgbhggbcphfhegjgmcpebhchcgbhjemgjhdhedlhihcaagcgphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccogfhihahcgfhdhdgjgpgoeph agfhcgbhegphchdcoefhihahcgfhdhdgjgpgoephagfhcgbhegphcaaabacaaabemaaadgmgpghheaacaemgphcghcpgbhagbgdgigfcpgdgpgngngpgohdcpgmgpghghgjgoghcpemgpghdlhihcaaemgphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccofagihjhdgjgdgbgmephagfhcgbhegphcaaabacaaamfkaaafgbgdgdhfgnfkaaangjgohahfheebhehegbgdgigfgeejaabehcgfhbhfgfhdhegfgefagbhcgbgmgmgfgmgjhdgnecaaakhcgfhdhfgmhefehjhagfemaaafgbgmgjgbhdhbaahoaaaoemaaafgjgohahfheheaablemgphcghcpgbhagbgdgigfcphagjghcpgegbhegbcpfehfhagmgfdlemaaaggjgohahfhehdhbaahoaaagemaaangmgjgogfgbghgffehcgbgdgfhcheaachemgphcghcpgbhagbgdgigfcphagjghcphagfgocphfhegjgmcpemgjgogfgbghgffehcgbgdgfhcdlemaaadgmgpghhbaahoaabeemaaahgphfhehahfhehdhbaahoaaagemaaakhagbhcgfgohefagmgbgoheaafaemgphcghcpgbhagbgdgigfcphagjghcpgcgbgdglgfgogecpgigbgegpgphacpgfhigfgdhfhegjgpgogfgoghgjgogfcphagihjhdgjgdgbgmemgbhjgfhccphagmgbgohdcpfagihjhdgjgdgbgmfagmgbgodlemaaadhcgfhdheaaeeemgphcghcpgbhagbgdgigf 
cphagjghcpgcgbgdglgfgogecpgigbgegpgphacpgfhigfgdhfhegjgpgogfgoghgjgogfcphagihjhdgjgdgbgmemgbhjgfhccpfcgfhdhfgmhedlhihcaacbgphcghcogbhagbgdgigfcohagjghcogjgnhagmcohagmgbgocoephagfhcgbhegphcaaabacaaabemaaaegnelgfhjheaacgemgphcghcpgbhagbgdgigfcphagjghcpgjgnhagmcphagmgbgocpephagfhcgbhegphcelgfhjdlhihahbaahoaaapdchahahahahdhcaaclgphcghcogbhagbgdgigfcogdgpgngngpgohdcogmgpghghgjgoghcogjgnhagmcoemgpghdeekemgpghghgfhccikmpnoicknfncdiacaaabemaaaegogbgngfhbaahoaaaohihaheaafjgphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccogfhihahcgfhdhdgjgpgoephagfhcgbhegphchdcofaepfahcgpgkgfgdhehahahdhcaaecgphcghcogbhagbgdgigfcohagjghcogcgbgdglgfgogecogigbgegpgphacogfhigfgdhfhegjgpgogfgoghgjgogfcohagihjhdgjgdgbgmemgbhjgfhccofcgfhdhfgmheaaabacaaacecaaamhcgfhehfhcgofdhegbhehfhdemaaaghcgfhdhfgmheheaabcemgkgbhggbcpgmgbgoghcpepgcgkgfgdhedlhihaachahbaahoaabohdhbaahoaaabhhaeaaabhdhca abbgkgbhggbcogmgbgoghcoejgohegfghgfhcbcockakephibihdiacaaabejaaafhggbgmhfgfhihcaabagkgbhggbcogmgbgoghcoeohfgngcgfhcigkmjfbnaljeoailachihahihihdhbaahoaaabhhaeaaakhbaahoaabmhihdhbaahoaaakdpemhhaiaabbhbaahoaabmhbaahoaaaphihdhbaahohhaeaaakhihdhbaahoaaaihdhbaahoaaakdpemhhaiaabahiaahi','','b','scope','true')) - scope-117 \ No newline at end of file +