Kuldeep Joshi (OpenERP) has proposed merging 
lp:~openerp-dev/openobject-addons/trunk-fix-server-log-account-logger-kjo into 
lp:~openerp-dev/openobject-addons/trunk-fix-server-log.

Requested reviews:
  Bhumika (OpenERP) (sbh-openerp)

For more details, see:
https://code.launchpad.net/~openerp-dev/openobject-addons/trunk-fix-server-log-account-logger-kjo/+merge/111554
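
For reviewers unfamiliar with the convention being applied here: every touched
module moves from ad-hoc logging.getLogger('hard.coded.channel') calls (or
per-class logger attributes) to a single module-level logger bound to __name__.
The sketch below is only an illustration of that pattern, not code from the
branch; compute_filters and the sample clauses are hypothetical.

    import logging

    # One logger per module, automatically named after the module's dotted
    # path (e.g. "account.account"), instead of looking up a hard-coded
    # channel name at every call site.
    _logger = logging.getLogger(__name__)

    def compute_filters(wheres):
        """Hypothetical helper: join non-empty WHERE clauses and log them."""
        filters = " AND ".join(w.strip() for w in wheres if w.strip())
        # Passing the value as a separate argument (lazy %-formatting) lets
        # the logging module skip the interpolation when DEBUG is disabled.
        _logger.debug('Filters: %s', filters)
        return filters

    if __name__ == '__main__':
        logging.basicConfig(level=logging.DEBUG)
        compute_filters(['l.active = true', " l.state = 'posted' "])

The practical gain is that the logger name always matches the addon's module
path, so per-addon log filtering keeps working without maintaining the
hard-coded channel strings this branch removes.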
-- 
https://code.launchpad.net/~openerp-dev/openobject-addons/trunk-fix-server-log-account-logger-kjo/+merge/111554
Your team OpenERP R&D Team is subscribed to branch 
lp:~openerp-dev/openobject-addons/trunk-fix-server-log.
=== modified file 'account/account.py'
--- account/account.py	2012-06-21 09:36:07 +0000
+++ account/account.py	2012-06-22 07:29:21 +0000
@@ -29,6 +29,7 @@
 from osv import fields, osv
 import decimal_precision as dp
 from tools.translate import _
+_logger = logging.getLogger(__name__)
 
 def check_cycle(self, cr, uid, ids, context=None):
     """ climbs the ``self._table.parent_id`` chains for 100 levels or
@@ -294,7 +295,7 @@
             if aml_query.strip():
                 wheres.append(aml_query.strip())
             filters = " AND ".join(wheres)
-            logging.getLogger('account').debug('Filters: %s'%filters)
+            _logger.debug('Filters: %s'%filters)
             # IN might not work ideally in case there are too many
             # children_and_consolidated, in that case join on a
             # values() e.g.:
@@ -310,7 +311,7 @@
                        " GROUP BY l.account_id")
             params = (tuple(children_and_consolidated),) + query_params
             cr.execute(request, params)
-            logging.getLogger('account').debug('Status: %s'%cr.statusmessage)
+            _logger.debug('Status: %s'%cr.statusmessage)
 
             for res in cr.dictfetchall():
                 accounts[res['id']] = res
@@ -2094,7 +2095,7 @@
         }
 
     def compute(self, cr, uid, taxes, price_unit, quantity,  product=None, partner=None):
-        logging.getLogger('account').debug("Deprecated, use compute_all(...)['taxes'] instead of compute(...) to manage prices with tax included")
+        _logger.debug("Deprecated, use compute_all(...)['taxes'] instead of compute(...) to manage prices with tax included")
         return self._compute(cr, uid, taxes, price_unit, quantity, product, partner)
 
     def _compute(self, cr, uid, taxes, price_unit, quantity, product=None, partner=None):

=== modified file 'account/installer.py'
--- account/installer.py	2012-06-05 11:19:46 +0000
+++ account/installer.py	2012-06-22 07:29:21 +0000
@@ -30,11 +30,11 @@
 from osv import fields, osv
 import netsvc
 import tools
+_logger = logging.getLogger(__name__)
 
 class account_installer(osv.osv_memory):
     _name = 'account.installer'
     _inherit = 'res.config.installer'
-    __logger = logging.getLogger(_name)
 
     def _get_charts(self, cr, uid, context=None):
         modules = self.pool.get('ir.module.module')
@@ -149,7 +149,7 @@
             cr, uid, ids, context=context)
         chart = self.read(cr, uid, ids, ['charts'],
                           context=context)[0]['charts']
-        self.__logger.debug('Installing chart of accounts %s', chart)
+        _logger.debug('Installing chart of accounts %s', chart)
         return modules | set([chart])
 
 account_installer()

=== modified file 'account_bank_statement_extensions/report/bank_statement_balance_report.py'
--- account_bank_statement_extensions/report/bank_statement_balance_report.py	2012-06-13 09:06:00 +0000
+++ account_bank_statement_extensions/report/bank_statement_balance_report.py	2012-06-22 07:29:21 +0000
@@ -24,11 +24,12 @@
 from report import report_sxw
 import pooler
 import logging
+_logger = logging.getLogger(__name__)
 
 class bank_statement_balance_report(report_sxw.rml_parse):
 
     def set_context(self, objects, data, ids, report_type=None):
-        #logging('bank.statement.balance.report').warning('addons.'+__name__, 'set_context, objects = %s, data = %s, ids = %s' % (objects, data, ids))
+        #_logger.warning('addons.'+__name__, 'set_context, objects = %s, data = %s, ids = %s' % (objects, data, ids))
         cr = self.cr
         uid = self.uid
         context = self.context

=== modified file 'account_coda/wizard/account_coda_import.py'
--- account_coda/wizard/account_coda_import.py	2012-06-13 09:06:00 +0000
+++ account_coda/wizard/account_coda_import.py	2012-06-22 07:29:21 +0000
@@ -28,6 +28,7 @@
 import re
 from traceback import format_exception
 from sys import exc_info
+_logger = logging.getLogger(__name__)
 
 class account_coda_import(osv.osv_memory):
     _name = 'account.coda.import'
@@ -815,7 +816,7 @@
                                         ttype = line['type'] == 'supplier' and 'payment' or 'receipt',
                                         date = line['val_date'],
                                         context = context)
-                                    #logging('account.coda').warning('voucher_dict = %s' % voucher_dict) 
+                                    #_logger.warning('voucher_dict = %s' % voucher_dict) 
                                     voucher_line_vals = False
                                     if voucher_dict['value']['line_ids']:
                                         for line_dict in voucher_dict['value']['line_ids']:
@@ -888,19 +889,19 @@
                 nb_err += 1
                 err_string += _('\nError ! ') + str(e)
                 tb = ''.join(format_exception(*exc_info()))
-                logging('account.coda').error('Application Error while processing Statement %s\n%s' % (statement.get('name', '/'),tb))
+                _logger.error('Application Error while processing Statement %s\n%s' % (statement.get('name', '/'),tb))
             except Exception, e:
                 cr.rollback()
                 nb_err += 1
                 err_string += _('\nSystem Error : ') + str(e)
                 tb = ''.join(format_exception(*exc_info()))
-                logging('account.coda').error('System Error while processing Statement %s\n%s' % (statement.get('name', '/'),tb))
+                _logger.error('System Error while processing Statement %s\n%s' % (statement.get('name', '/'),tb))
             except :
                 cr.rollback()
                 nb_err += 1
                 err_string = _('\nUnknown Error : ') + str(e)
                 tb = ''.join(format_exception(*exc_info()))
-                logging('account.coda').error('Unknown Error while processing Statement %s\n%s' % (statement.get('name', '/'),tb))
+                _logger.error('Unknown Error while processing Statement %s\n%s' % (statement.get('name', '/'),tb))
 
         # end 'for statement in coda_statements'
                           

=== modified file 'auth_openid/controllers/main.py'
--- auth_openid/controllers/main.py	2012-02-13 15:27:55 +0000
+++ auth_openid/controllers/main.py	2012-06-22 07:29:21 +0000
@@ -44,8 +44,8 @@
 
 
 
-_logger = logging.getLogger('web.auth_openid')
-oidutil.log = logging.getLogger('openid').debug
+_logger = logging.getLogger(__name__)
+oidutil.log = _logger.debug
 
 
 class GoogleAppsAwareConsumer(consumer.GenericConsumer):

=== modified file 'base_crypt/crypt.py'
--- base_crypt/crypt.py	2012-03-30 01:14:07 +0000
+++ base_crypt/crypt.py	2012-06-22 07:29:21 +0000
@@ -42,8 +42,10 @@
 import pooler
 from tools.translate import _
 from service import security
+import logging
 
 magic_md5 = '$1$'
+_logger = logging.getLogger(__name__)
 
 def gen_salt( length=8, symbols=ascii_letters + digits ):
     seed()
@@ -179,8 +181,7 @@
             cr = pooler.get_db(db).cursor()
             return self._login(cr, db, login, password)
         except Exception:
-            import logging
-            logging.getLogger('netsvc').exception('Could not authenticate')
+            _logger.exception('Could not authenticate')
             return Exception('Access Denied')
         finally:
             if cr is not None:

=== modified file 'base_report_designer/plugin/openerp_report_designer/bin/script/lib/logreport.py'
--- base_report_designer/plugin/openerp_report_designer/bin/script/lib/logreport.py	2011-12-19 16:54:40 +0000
+++ base_report_designer/plugin/openerp_report_designer/bin/script/lib/logreport.py	2012-06-22 07:29:21 +0000
@@ -27,21 +27,20 @@
 LOG_WARNING='warn'
 LOG_ERROR='error'
 LOG_CRITICAL='critical'
+_logger = logging.getLogger(__name__)
 
 def log_detail(self):
     import os
-    logger = logging.getLogger()
     logfile_name = os.path.join(tempfile.gettempdir(), "openerp_report_designer.log")
     hdlr = logging.FileHandler(logfile_name)
     formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
     hdlr.setFormatter(formatter)
-    logger.addHandler(hdlr)
-    logger.setLevel(logging.INFO)
+    _logger.addHandler(hdlr)
+    _logger.setLevel(logging.INFO)
 
 class Logger(object):
     def log_write(self,name,level,msg):
-        log = logging.getLogger(name)
-        getattr(log,level)(msg)
+        getattr(_logger,level)(msg)
 
     def shutdown(self):
         logging.shutdown()

=== modified file 'base_vat/base_vat.py'
--- base_vat/base_vat.py	2012-02-08 15:26:47 +0000
+++ base_vat/base_vat.py	2012-06-22 07:29:21 +0000
@@ -23,11 +23,12 @@
 import string
 import datetime
 import re
+_logger = logging.getLogger(__name__)
 
 try:
     import vatnumber
 except ImportError:
-    logging.getLogger('base_vat').warning("VAT validation partially unavailable because the `vatnumber` Python library cannot be found. "
+    _logger.warning("VAT validation partially unavailable because the `vatnumber` Python library cannot be found. "
                                           "Install it to support more countries, for example with `easy_install vatnumber`.")
     vatnumber = None
 

=== modified file 'caldav/caldav_node.py'
--- caldav/caldav_node.py	2011-10-27 21:11:24 +0000
+++ caldav/caldav_node.py	2012-06-22 07:29:21 +0000
@@ -23,6 +23,7 @@
 from document.nodes import _str2time, nodefd_static
 import logging
 from orm_utils import get_last_modified
+_logger = logging.getLogger(__name__)
 
 try:
     from tools.dict_tools import  dict_merge2
@@ -223,7 +224,6 @@
         res = []
         if not filters:
             return res
-        _log = logging.getLogger('caldav.query')
         if filters.localName == 'calendar-query':
             res = []
             for filter_child in filters.childNodes:

=== modified file 'caldav/calendar.py'
--- caldav/calendar.py	2011-10-27 21:11:24 +0000
+++ caldav/calendar.py	2012-06-22 07:29:21 +0000
@@ -34,6 +34,7 @@
 from caldav_node import res_node_calendar
 from orm_utils import get_last_modified
 from tools.safe_eval import safe_eval as eval
+_logger = logging.getLogger(__name__)
 
 try:
     import vobject
@@ -240,7 +241,6 @@
 
 class CalDAV(object):
     __attribute__ = {}
-    _logger = logging.getLogger('document.caldav')
 
     def ical_set(self, name, value, type):
         """ set calendar Attribute
@@ -725,13 +725,13 @@
                 objs.append(cal_children[child.name.lower()])
             elif child.name.upper() == 'CALSCALE':
                 if child.value.upper() != 'GREGORIAN':
-                    self._logger.warning('How do I handle %s calendars?',child.value)
+                    _logger.warning('How do I handle %s calendars?',child.value)
             elif child.name.upper() in ('PRODID', 'VERSION'):
                 pass
             elif child.name.upper().startswith('X-'):
-                self._logger.debug("skipping custom node %s", child.name)
+                _logger.debug("skipping custom node %s", child.name)
             else:
-                self._logger.debug("skipping node %s", child.name)
+                _logger.debug("skipping node %s", child.name)
         
         res = []
         for obj_name in list(set(objs)):

=== modified file 'caldav/calendar_collection.py'
--- caldav/calendar_collection.py	2011-12-19 16:54:40 +0000
+++ caldav/calendar_collection.py	2012-06-22 07:29:21 +0000
@@ -23,6 +23,7 @@
 from tools.translate import _
 import caldav_node
 import logging
+_logger = logging.getLogger(__name__)
 
 class calendar_collection(osv.osv):
     _inherit = 'document.directory' 
@@ -44,8 +45,7 @@
             root_cal_dir = self.browse(cr,uid, root_id, context=context) 
             return root_cal_dir.name
         except Exception:
-            logger = logging.getLogger('document')
-            logger.warning('Cannot set root directory for Calendars:', exc_info=True)
+            _logger.warning('Cannot set root directory for Calendars:', exc_info=True)
             return False
         return False
 

=== modified file 'crm/crm_meeting.py'
--- crm/crm_meeting.py	2012-06-13 12:33:01 +0000
+++ crm/crm_meeting.py	2012-06-22 07:29:21 +0000
@@ -26,6 +26,7 @@
 from osv import fields, osv
 import tools
 from tools.translate import _
+_logger = logging.getLogger(__name__)
 
 class crm_lead(base_stage, osv.osv):
         """ CRM Leads """
@@ -180,7 +181,7 @@
                                             'user_id': user_id}, context=context)
             except:
                 # Tolerate a missing shortcut. See product/product.py for similar code.
-                logging.getLogger('orm').debug('Skipped meetings shortcut for user "%s"', data.get('name','<new'))
+                _logger.debug('Skipped meetings shortcut for user "%s"', data.get('name','<new'))
         return user_id
 
 res_users()

=== modified file 'edi/__init__.py'
--- edi/__init__.py	2011-12-19 16:54:40 +0000
+++ edi/__init__.py	2012-06-22 07:29:21 +0000
@@ -23,12 +23,13 @@
 import models
 import edi_service
 from models.edi import EDIMixin, edi_document
+_logger = logging.getLogger(__name__)
 
 # web
 try:
     import controllers
 except ImportError:
-    logging.getLogger('init.load').warn(
+    _logger.warn(
         """Could not load openerp-web section of EDI, EDI will not behave correctly
 
 To fix, launch openerp-web in embedded mode""")

=== modified file 'edi/edi_service.py'
--- edi/edi_service.py	2011-12-19 16:54:40 +0000
+++ edi/edi_service.py	2012-06-22 07:29:21 +0000
@@ -23,7 +23,7 @@
 import netsvc
 import openerp
 
-_logger = logging.getLogger('edi.service')
+_logger = logging.getLogger(__name__)
 
 class edi(netsvc.ExportService):
 

=== modified file 'edi/models/edi.py'
--- edi/models/edi.py	2012-01-13 11:17:36 +0000
+++ edi/models/edi.py	2012-06-22 07:29:21 +0000
@@ -35,6 +35,7 @@
 from osv import osv,fields,orm
 from tools.translate import _
 from tools.safe_eval import safe_eval as eval
+_logger = logging.getLogger(__name__)
 
 EXTERNAL_ID_PATTERN = re.compile(r'^([^.:]+)(?::([^.]+))?\.(\S+)$')
 EDI_VIEW_WEB_URL = '%s/edi/view?db=%s&token=%s'
@@ -72,7 +73,6 @@
         return record_log.get('write_date') or record_log.get('create_date') or False
     return False
 
-_logger = logging.getLogger('edi')
 
 class edi_document(osv.osv):
     _name = 'edi.document'

=== modified file 'edi/models/res_partner.py'
--- edi/models/res_partner.py	2012-03-30 09:08:37 +0000
+++ edi/models/res_partner.py	2012-06-22 07:29:21 +0000
@@ -24,6 +24,7 @@
 from edi import EDIMixin
 from openerp import SUPERUSER_ID
 from tools.translate import _
+_logger = logging.getLogger(__name__)
 
 RES_PARTNER_EDI_STRUCT = {
     'name': True,
@@ -63,7 +64,7 @@
         code, label = 'edi_generic', 'Generic Bank Type (auto-created for EDI)'
         bank_code_ids = res_partner_bank_type.search(cr, uid, [('code','=',code)], context=context)
         if not bank_code_ids:
-            logging.getLogger('edi.res_partner').info('Normal bank account type is missing, creating '
+            _logger.info('Normal bank account type is missing, creating '
                                                       'a generic bank account type for EDI.')
             self.res_partner_bank_type.create(cr, SUPERUSER_ID, {'name': label,
                                                                  'code': label})
@@ -84,7 +85,7 @@
                                              bank_name, ext_bank_id, context=import_ctx)
                 except osv.except_osv:
                     # failed to import it, try again with unrestricted default type
-                    logging.getLogger('edi.res_partner').warning('Failed to import bank account using'
+                    _logger.warning('Failed to import bank account using'
                                                                  'bank type: %s, ignoring', import_ctx['default_state'],
                                                                  exc_info=True)
         return contact_id

=== modified file 'email_template/email_template.py'
--- email_template/email_template.py	2012-05-25 13:35:36 +0000
+++ email_template/email_template.py	2012-06-22 07:29:21 +0000
@@ -29,11 +29,12 @@
 import tools
 from tools.translate import _
 from urllib import quote as quote
+_logger = logging.getLogger(__name__)
 
 try:
     from mako.template import Template as MakoTemplate
 except ImportError:
-    logging.getLogger('init').warning("email_template: mako templates not available, templating features will not work!")
+    _logger.warning("email_template: mako templates not available, templating features will not work!")
 
 class email_template(osv.osv):
     "Templates for sending email"
@@ -75,7 +76,7 @@
                 result = u''
             return result
         except Exception:
-            logging.exception("failed to render mako template value %r", template)
+            _logger.exception("failed to render mako template value %r", template)
             return u""
 
     def get_email_template(self, cr, uid, template_id=False, record_id=None, context=None):

=== modified file 'fetchmail/fetchmail.py'
--- fetchmail/fetchmail.py	2012-05-04 11:57:48 +0000
+++ fetchmail/fetchmail.py	2012-06-22 07:29:21 +0000
@@ -39,7 +39,7 @@
 import tools
 from tools.translate import _
 
-logger = logging.getLogger('fetchmail')
+_logger = logging.getLogger(__name__)
 
 class fetchmail_server(osv.osv):
     """Incoming POP/IMAP mail server account"""

=== modified file 'hr/hr.py'
--- hr/hr.py	2012-06-14 14:31:07 +0000
+++ hr/hr.py	2012-06-22 07:29:21 +0000
@@ -25,6 +25,7 @@
 from osv import fields, osv
 from PIL import Image
 import StringIO
+_logger = logging.getLogger(__name__)
 
 class hr_employee_category(osv.osv):
 
@@ -304,7 +305,7 @@
                                             'user_id': user_id}, context=context)
             except:
                 # Tolerate a missing shortcut. See product/product.py for similar code.
-                logging.getLogger('orm').debug('Skipped meetings shortcut for user "%s"', data.get('name','<new'))
+                _logger.debug('Skipped meetings shortcut for user "%s"', data.get('name','<new'))
 
         return user_id
 

=== modified file 'import_base/import_framework.py'
--- import_base/import_framework.py	2011-12-31 07:57:20 +0000
+++ import_base/import_framework.py	2012-06-22 07:29:21 +0000
@@ -30,7 +30,7 @@
 import StringIO
 import traceback
 pp = pprint.PrettyPrinter(indent=4)
-
+_logger = logging.getLogger(__name__)
 
 
 
@@ -60,7 +60,6 @@
         self.context = context or {}
         self.email = email_to_notify
         self.table_list = []
-        self.logger = logging.getLogger(module_name)
         self.initialize()
 
     """
@@ -165,7 +164,7 @@
                 data_i is a map external field_name => value
                 and each data_i have a external id => in data_id['id']
         """
-        self.logger.info(' Importing %s into %s' % (table, model))
+        _logger.info(' Importing %s into %s' % (table, model))
         if not datas:
             return (0, 'No data found')
         mapping['id'] = 'id_new'
@@ -188,7 +187,7 @@
         model_obj = self.obj.pool.get(model)
         if not model_obj:
             raise ValueError(_("%s is not a valid model name") % model)
-        self.logger.debug(_(" fields imported : ") + str(fields))
+        _logger.debug(_(" fields imported : ") + str(fields))
         (p, r, warning, s) = model_obj.import_data(self.cr, self.uid, fields, res, mode='update', current_module=self.module_name, noupdate=True, context=self.context)
         for (field, field_name) in self_dependencies:
             self._import_self_dependencies(model_obj, field, datas)
@@ -431,9 +430,9 @@
             'auto_delete' : True})
         email_obj.send(self.cr, self.uid, [email_id])
         if error:
-            self.logger.error(_("Import failed due to an unexpected error"))
+            _logger.error(_("Import failed due to an unexpected error"))
         else:
-            self.logger.info(_("Import finished, notification email sended"))
+            _logger.info(_("Import finished, notification email sended"))
 
     def get_email_subject(self, result, error=False):
         """

=== modified file 'import_sugarcrm/sugar.py'
--- import_sugarcrm/sugar.py	2011-12-19 16:54:40 +0000
+++ import_sugarcrm/sugar.py	2012-06-22 07:29:21 +0000
@@ -33,7 +33,7 @@
 import logging
 
 import sys
-
+_logger = logging.getLogger(__name__)
 
 debug = False
 
@@ -119,7 +119,7 @@
                 email_list.append(list.Email_address)
         return email_list
     except Exception,e:
-        logging.getLogger('sugarcrm_soap').error('Exception: %s\n' % (tools.ustr(e)))
+        _logger.error('Exception: %s\n' % (tools.ustr(e)))
         return False
 
 def get_document_revision_search(portType, sessionid, module_id=None):

=== modified file 'l10n_be_invoice_bba/invoice.py'
--- l10n_be_invoice_bba/invoice.py	2012-06-13 09:06:00 +0000
+++ l10n_be_invoice_bba/invoice.py	2012-06-22 07:29:21 +0000
@@ -24,6 +24,7 @@
 from osv import fields, osv
 from tools.translate import _
 import logging
+_logger = logging.getLogger(__name__)
 
 """
 account.invoice object:
@@ -40,7 +41,7 @@
                 context=context)
         res[[i for i,x in enumerate(res) if x[0] == 'none'][0]] = ('none', 'Free Communication')
         res.append(('bba', 'BBA Structured Communication'))
-        #logging('l1on.be.invoice.bba').warning('reference_type =  %s' %res ) 
+        #_logger.warning('reference_type =  %s' %res )
         return res
 
     def check_bbacomm(self, val):
@@ -67,7 +68,7 @@
         result = super(account_invoice, self).onchange_partner_id(cr, uid, ids, type, partner_id,
             date_invoice, payment_term, partner_bank_id, company_id)
 #        reference_type = self.default_get(cr, uid, ['reference_type'])['reference_type']
-#        logging('l1on.be.invoice.bba').warning('partner_id %s' % partner_id)
+#        _logger.warning('partner_id %s' % partner_id)
         reference = False
         reference_type = 'none'
         if partner_id:        

=== modified file 'l10n_multilang/l10n_multilang.py'
--- l10n_multilang/l10n_multilang.py	2012-06-14 07:03:29 +0000
+++ l10n_multilang/l10n_multilang.py	2012-06-22 07:29:21 +0000
@@ -23,6 +23,7 @@
 import os
 from tools.translate import _
 import logging
+_logger = logging.getLogger(__name__)
 
 class wizard_multi_charts_accounts(osv.osv_memory):
     """
@@ -79,7 +80,7 @@
                         if context.get('lang') == lang:
                             self.pool.get(out_obj._name).write(cr, uid, out_ids[j], {in_field: value[in_id]})
                 else:
-                    logging.getLogger('l10n.multilang').info('Language: %s. Translation from template: there is no translation available for %s!' %(lang,  src[in_id]))#out_obj._name))
+                    _logger.info('Language: %s. Translation from template: there is no translation available for %s!' %(lang,  src[in_id]))#out_obj._name))
         return True
 
     def execute(self, cr, uid, ids, context=None):

=== modified file 'mail/mail_message.py'
--- mail/mail_message.py	2012-05-31 09:05:42 +0000
+++ mail/mail_message.py	2012-06-22 07:29:21 +0000
@@ -36,7 +36,7 @@
 from tools.translate import _
 from openerp import SUPERUSER_ID
 
-_logger = logging.getLogger('mail')
+_logger = logging.getLogger(__name__)
 
 def format_date_tz(date, tz=None):
     if not date:

=== modified file 'portal/wizard/portal_wizard.py'
--- portal/wizard/portal_wizard.py	2012-05-07 18:13:46 +0000
+++ portal/wizard/portal_wizard.py	2012-06-22 07:29:21 +0000
@@ -27,7 +27,7 @@
 from tools.translate import _
 
 from base.res.res_users import _lang_get
-
+_logger = logging.getLogger(__name__)
 
 
 # welcome email sent to new portal users (note that calling tools.translate._
@@ -174,7 +174,7 @@
                 body = _(WELCOME_EMAIL_BODY) % data
                 res = mail_message_obj.schedule_with_attach(cr, uid, email_from , [email_to], subject, body, context=context)
                 if not res:
-                    logging.getLogger('res.portal.wizard').warning(
+                    _logger.warning(
                         'Failed to send email from %s to %s', email_from, email_to)
         
         return {'type': 'ir.actions.act_window_close'}

=== modified file 'portal/wizard/share_wizard.py'
--- portal/wizard/share_wizard.py	2011-12-19 16:54:40 +0000
+++ portal/wizard/share_wizard.py	2012-06-22 07:29:21 +0000
@@ -21,6 +21,8 @@
 
 from osv import osv, fields
 from tools.translate import _
+import logging
+_logger = logging.getLogger(__name__)
 
 UID_ROOT = 1
 SHARED_DOCS_MENU = "Documents"
@@ -164,19 +166,19 @@
             # v6.1, the algorithm for combining them will OR the rules, hence
             # extending the visible data.
             Rules.write(cr, UID_ROOT, share_rule_ids, {'groups': [(4,target_group.id)]})
-            self._logger.debug("Linked sharing rules from temporary sharing group to group %s", target_group)
+            _logger.debug("Linked sharing rules from temporary sharing group to group %s", target_group)
 
             # Copy the access rights. This is appropriate too because
             # groups have the UNION of all permissions granted by their
             # access right lines.
             for access_line in share_group.model_access:
                 Rights.copy(cr, UID_ROOT, access_line.id, default={'group_id': target_group.id})
-            self._logger.debug("Copied access rights from temporary sharing group to group %s", target_group)
+            _logger.debug("Copied access rights from temporary sharing group to group %s", target_group)
 
         # finally, delete it after removing its users
         Groups.write(cr, UID_ROOT, [share_group_id], {'users': [(6,0,[])]})
         Groups.unlink(cr, UID_ROOT, [share_group_id])
-        self._logger.debug("Deleted temporary sharing group %s", share_group_id)
+        _logger.debug("Deleted temporary sharing group %s", share_group_id)
 
     def _finish_result_lines(self, cr, uid, wizard_data, share_group_id, context=None):
         super(share_wizard_portal,self)._finish_result_lines(cr, uid, wizard_data, share_group_id, context=context)

=== modified file 'report_webkit/webkit_report.py'
--- report_webkit/webkit_report.py	2012-02-28 14:08:16 +0000
+++ report_webkit/webkit_report.py	2012-06-22 07:29:21 +0000
@@ -51,7 +51,7 @@
 from tools.translate import _
 from osv.osv import except_osv
 
-logger = logging.getLogger('report_webkit')
+_logger = logging.getLogger(__name__)
 
 def mako_template(text):
     """Build a Mako template.
@@ -248,7 +248,7 @@
                     htmls.append(html)
                 except Exception, e:
                     msg = exceptions.text_error_template().render()
-                    logger.error(msg)
+                    _logger.error(msg)
                     raise except_osv(_('Webkit render'), msg)
         else:
             try :
@@ -259,7 +259,7 @@
                 htmls.append(html)
             except Exception, e:
                 msg = exceptions.text_error_template().render()
-                logger.error(msg)
+                _logger.error(msg)
                 raise except_osv(_('Webkit render'), msg)
         head_mako_tpl = mako_template(header)
         try :
@@ -281,7 +281,7 @@
                                             **self.parser_instance.localcontext)
             except:
                 msg = exceptions.text_error_template().render()
-                logger.error(msg)
+                _logger.error(msg)
                 raise except_osv(_('Webkit render'), msg)
         if report_xml.webkit_debug :
             try :
@@ -292,7 +292,7 @@
                                            **self.parser_instance.localcontext)
             except Exception, e:
                 msg = exceptions.text_error_template().render()
-                logger.error(msg)
+                _logger.error(msg)
                 raise except_osv(_('Webkit render'), msg)
             return (deb, 'html')
         bin = self.get_lib(cursor, uid)

=== modified file 'share/wizard/share_wizard.py'
--- share/wizard/share_wizard.py	2012-04-10 16:01:00 +0000
+++ share/wizard/share_wizard.py	2012-06-22 07:29:21 +0000
@@ -32,6 +32,7 @@
 from tools.translate import _
 from tools.safe_eval import safe_eval
 import openerp
+_logger = logging.getLogger(__name__)
 
 FULL_ACCESS = ('perm_read', 'perm_write', 'perm_create', 'perm_unlink')
 READ_WRITE_ACCESS = ('perm_read', 'perm_write')
@@ -48,7 +49,6 @@
     return ''.join(random.sample(RANDOM_PASS_CHARACTERS,10))
 
 class share_wizard(osv.osv_memory):
-    _logger = logging.getLogger('share.wizard')
     _name = 'share.wizard'
     _description = 'Share Wizard'
 
@@ -335,7 +335,7 @@
             except Exception:
                 # Note: must catch all exceptions, as UnquoteEvalContext may cause many
                 #       different exceptions, as it shadows builtins.
-                self._logger.debug("Failed to cleanup action context as it does not parse server-side", exc_info=True)
+                _logger.debug("Failed to cleanup action context as it does not parse server-side", exc_info=True)
                 result = context_str
         return result
 
@@ -496,8 +496,8 @@
             [x.id for x in current_user.groups_id], target_model_ids, context=context)
         group_access_map = self._get_access_map_for_groups_and_models(cr, uid,
             [group_id], target_model_ids, context=context)
-        self._logger.debug("Current user access matrix: %r", current_user_access_map)
-        self._logger.debug("New group current access matrix: %r", group_access_map)
+        _logger.debug("Current user access matrix: %r", current_user_access_map)
+        _logger.debug("New group current access matrix: %r", group_access_map)
 
         # Create required rights if allowed by current user rights and not
         # already granted
@@ -520,7 +520,7 @@
                     need_creation = True
             if need_creation:
                 model_access_obj.create(cr, UID_ROOT, values)
-                self._logger.debug("Creating access right for model %s with values: %r", model.model, values)
+                _logger.debug("Creating access right for model %s with values: %r", model.model, values)
 
     def _link_or_copy_current_user_rules(self, cr, current_user, group_id, fields_relations, context=None):
         rule_obj = self.pool.get('ir.rule')
@@ -542,13 +542,13 @@
                                 'groups': [(6,0,[group_id])],
                                 'domain_force': rule.domain, # evaluated version!
                             })
-                            self._logger.debug("Copying rule %s (%s) on model %s with domain: %s", rule.name, rule.id, model.model, rule.domain_force)
+                            _logger.debug("Copying rule %s (%s) on model %s with domain: %s", rule.name, rule.id, model.model, rule.domain_force)
                         else:
                             # otherwise we can simply link the rule to keep it dynamic
                             rule_obj.write(cr, 1, [rule.id], {
                                     'groups': [(4,group_id)]
                                 })
-                            self._logger.debug("Linking rule %s (%s) on model %s with domain: %s", rule.name, rule.id, model.model, rule.domain_force)
+                            _logger.debug("Linking rule %s (%s) on model %s with domain: %s", rule.name, rule.id, model.model, rule.domain_force)
 
     def _check_personal_rule_or_duplicate(self, cr, group_id, rule, context=None):
         """Verifies that the given rule only belongs to the given group_id, otherwise
@@ -567,7 +567,7 @@
                                        'groups': [(6,0,[group_id])],
                                        'domain_force': rule.domain_force, # non evaluated!
                                })
-        self._logger.debug("Duplicating rule %s (%s) (domain: %s) for modified access ", rule.name, rule.id, rule.domain_force)
+        _logger.debug("Duplicating rule %s (%s) (domain: %s) for modified access ", rule.name, rule.id, rule.domain_force)
         # then disconnect from group_id:
         rule.write({'groups':[(3,group_id)]}) # disconnects, does not delete!
         return rule_obj.browse(cr, UID_ROOT, new_id, context=context)
@@ -602,7 +602,7 @@
                     if restrict:
                         continue
                     else:
-                        self._logger.debug("Ignoring sharing rule on model %s with domain: %s the same rule exists already", model_id, domain)
+                        _logger.debug("Ignoring sharing rule on model %s with domain: %s the same rule exists already", model_id, domain)
                         return
                 if restrict:
                     # restricting existing rules is done by adding the clause
@@ -614,7 +614,7 @@
                     new_clause = expression.normalize(eval(domain, eval_ctx))
                     combined_domain = expression.AND([new_clause, org_domain])
                     rule.write({'domain_force': combined_domain, 'name': rule.name + _('(Modified)')})
-                    self._logger.debug("Combining sharing rule %s on model %s with domain: %s", rule.id, model_id, domain)
+                    _logger.debug("Combining sharing rule %s on model %s with domain: %s", rule.id, model_id, domain)
         if not rule_ids or not restrict:
             # Adding the new rule in the group is ok for normal cases, because rules
             # in the same group and for the same model will be combined with OR
@@ -625,7 +625,7 @@
                 'domain_force': domain,
                 'groups': [(4,group_id)]
                 })
-            self._logger.debug("Created sharing rule on model %s with domain: %s", model_id, domain)
+            _logger.debug("Created sharing rule on model %s with domain: %s", model_id, domain)
 
     def _create_indirect_sharing_rules(self, cr, current_user, wizard_data, group_id, fields_relations, context=None):
         rule_name = _('Indirect sharing filter created by user %s (%s) for group %s') % \
@@ -648,7 +648,7 @@
                          group_id, model_id=model.id, domain=str(related_domain),
                          rule_name=rule_name, restrict=True, context=context)
         except Exception:
-            self._logger.exception('Failed to create share access')
+            _logger.exception('Failed to create share access')
             raise osv.except_osv(_('Sharing access could not be created'),
                                  _('Sorry, the current screen and filter you are trying to share are not supported at the moment.\nYou may want to try a simpler filter.'))
 
@@ -852,7 +852,7 @@
             notification_obj.create(cr, uid, {'user_id': result_line.user_id.id, 'message_id': msg_id}, context=context)
     
     def send_emails(self, cr, uid, wizard_data, context=None):
-        self._logger.info('Sending share notifications by email...')
+        _logger.info('Sending share notifications by email...')
         mail_message = self.pool.get('mail.message')
         user = self.pool.get('res.users').browse(cr, UID_ROOT, uid)
         if not user.user_email:
@@ -885,7 +885,7 @@
             msg_ids.append(mail_message.schedule_with_attach(cr, uid, user.user_email, [email_to], subject, body, model='share.wizard', context=context))
         # force direct delivery, as users expect instant notification
         mail_message.send(cr, uid, msg_ids, context=context)
-        self._logger.info('%d share notification(s) sent.', len(msg_ids))
+        _logger.info('%d share notification(s) sent.', len(msg_ids))
 
     def onchange_embed_options(self, cr, uid, ids, opt_title, opt_search, context=None):
         wizard = self.browse(cr, uid, ids[0], context)

=== modified file 'stock/stock.py'
--- stock/stock.py	2012-05-25 10:26:43 +0000
+++ stock/stock.py	2012-06-22 07:29:21 +0000
@@ -33,7 +33,7 @@
 from tools import float_compare
 import decimal_precision as dp
 import logging
-
+_logger = logging.getLogger(__name__)
 
 #----------------------------------------------------------
 # Incoterms
@@ -419,9 +419,8 @@
                     # so we ROLLBACK to the SAVEPOINT to restore the transaction to its earlier
                     # state, we return False as if the products were not available, and log it:
                     cr.execute("ROLLBACK TO stock_location_product_reserve")
-                    logger = logging.getLogger('stock.location')
-                    logger.warn("Failed attempt to reserve %s x product %s, likely due to another transaction already in progress. Next attempt is likely to work. Detailed error available at DEBUG level.", product_qty, product_id)
-                    logger.debug("Trace of the failed product reservation attempt: ", exc_info=True)
+                    _logger.warn("Failed attempt to reserve %s x product %s, likely due to another transaction already in progress. Next attempt is likely to work. Detailed error available at DEBUG level.", product_qty, product_id)
+                    _logger.debug("Trace of the failed product reservation attempt: ", exc_info=True)
                     return False
 
             # XXX TODO: rewrite this with one single query, possibly even the quantity conversion

=== modified file 'stock_planning/stock_planning.py'
--- stock_planning/stock_planning.py	2012-05-22 16:10:55 +0000
+++ stock_planning/stock_planning.py	2012-06-22 07:29:21 +0000
@@ -29,7 +29,7 @@
 import logging
 import decimal_precision as dp
 
-_logger = logging.getLogger('mps') 
+_logger = logging.getLogger(__name__) 
 
 
 def rounding(fl, round_value):

=== modified file 'users_ldap/users_ldap.py'
--- users_ldap/users_ldap.py	2012-03-30 01:14:07 +0000
+++ users_ldap/users_ldap.py	2012-06-22 07:29:21 +0000
@@ -27,6 +27,7 @@
 import tools
 from osv import fields, osv
 from openerp import SUPERUSER_ID
+_logger = logging.getLogger(__name__)
 
 class CompanyLDAP(osv.osv):
     _name = 'res.company.ldap'
@@ -107,8 +108,7 @@
         except ldap.INVALID_CREDENTIALS:
             return False
         except ldap.LDAPError, e:
-            logger = logging.getLogger('orm.ldap')
-            logger.error('An LDAP exception occurred: %s', e)
+            _logger.error('An LDAP exception occurred: %s', e)
         return entry
         
     def query(self, conf, filter, retrieve_attributes=None):
@@ -135,7 +135,6 @@
         """
 
         results = []
-        logger = logging.getLogger('orm.ldap')
         try:
             conn = self.connect(conf)
             conn.simple_bind_s(conf['ldap_binddn'] or '',
@@ -144,9 +143,9 @@
                                      filter, retrieve_attributes, timeout=60)
             conn.unbind()
         except ldap.INVALID_CREDENTIALS:
-            logger.error('LDAP bind failed.')
+            _logger.error('LDAP bind failed.')
         except ldap.LDAPError, e:
-            logger.error('An LDAP exception occurred: %s', e)
+            _logger.error('An LDAP exception occurred: %s', e)
         return results
 
     def map_ldap_attributes(self, cr, uid, conf, login, ldap_entry):
@@ -188,8 +187,7 @@
             if res[1]:
                 user_id = res[0]
         elif conf['create_user']:
-            logger = logging.getLogger('orm.ldap')
-            logger.debug("Creating new OpenERP user \"%s\" from LDAP" % login)
+            _logger.debug("Creating new OpenERP user \"%s\" from LDAP" % login)
             user_obj = self.pool.get('res.users')
             values = self.map_ldap_attributes(cr, uid, conf, login, ldap_entry)
             if conf['user']:
