Kolab:16 / pykolab: Changes of Revision 51
pykolab.spec
Changed
@@ -42,8 +42,6 @@
 Source0: pykolab-%{version}.tar.gz
 Source1: pykolab.logrotate
-Patch0001: 0001-Add-mysqlhost.patch
-
 BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root
 BuildArch: noarch
@@ -232,8 +230,6 @@
 %prep
 %setup -q
-%patch0001 -p1
-
 %build
 autoreconf -v || automake --add-missing && autoreconf -v
 %configure
0001-Add-mysqlhost.patch
Deleted
@@ -1,203 +0,0 @@ -From 894df668e96af15b172eff9b0e4ffcb42aa76084 Mon Sep 17 00:00:00 2001 -From: "Jeroen van Meeuwen (Kolab Systems)" <vanmeeuwen@kolabsys.com> -Date: Wed, 25 Sep 2019 08:10:49 +0200 -Subject: [PATCH] Add --mysqlhost - ---- - pykolab/setup/setup_mysql.py | 105 +++++++++++++++++-------------- - pykolab/setup/setup_roundcube.py | 3 +- - pykolab/setup/setup_syncroton.py | 13 +++- - 3 files changed, 72 insertions(+), 49 deletions(-) - -diff --git a/pykolab/setup/setup_mysql.py b/pykolab/setup/setup_mysql.py -index 807bc7f..d5e62a0 100644 ---- a/pykolab/setup/setup_mysql.py -+++ b/pykolab/setup/setup_mysql.py -@@ -48,6 +48,14 @@ def cli_options(): - help=_("Specify whether to use an (existing) or (new) MySQL server.") - ) - -+ mysql_group.add_option( -+ "--mysqlhost", -+ dest="mysqlhost", -+ action="store", -+ default='127.0.0.1', -+ help=_("The MySQL host address.") -+ ) -+ - mysql_group.add_option( - "--mysqlrootpw", - dest="mysqlrootpw", -@@ -70,45 +78,46 @@ def execute(*args, **kw): # noqa: C901 - ] - - # on CentOS7, there is MariaDB instead of MySQL -- mysqlservice = 'mysqld.service' -- if os.path.isfile('/usr/lib/systemd/system/mariadb.service'): -- mysqlservice = 'mariadb.service' -- elif os.path.isfile('/usr/lib/systemd/system/mysql.service'): -- mysqlservice = 'mysql.service' -- if not os.path.isfile('/usr/lib/systemd/system/' + mysqlservice): -- # on Debian Jessie, systemctl restart mysql -- mysqlservice = 'mysql' -- -- if os.path.isfile('/bin/systemctl'): -- subprocess.call(['/bin/systemctl', 'restart', mysqlservice]) -- elif os.path.isfile('/sbin/service'): -- subprocess.call(['/sbin/service', 'mysqld', 'restart']) -- elif os.path.isfile('/usr/sbin/service'): -- subprocess.call(['/usr/sbin/service', 'mysql', 'restart']) -- else: -- log.error(_("Could not start the MySQL database service.")) -- -- if os.path.isfile('/bin/systemctl'): -- subprocess.call(['/bin/systemctl', 'enable', mysqlservice]) -- elif os.path.isfile('/sbin/chkconfig'): -- subprocess.call(['/sbin/chkconfig', 'mysqld', 'on']) -- elif os.path.isfile('/usr/sbin/update-rc.d'): -- subprocess.call(['/usr/sbin/update-rc.d', 'mysql', 'defaults']) -- else: -- log.error( -- _("Could not configure to start on boot, the MySQL database service.") -- ) -+ if conf.mysqlserver != 'existing': -+ mysqlservice = 'mysqld.service' -+ if os.path.isfile('/usr/lib/systemd/system/mariadb.service'): -+ mysqlservice = 'mariadb.service' -+ elif os.path.isfile('/usr/lib/systemd/system/mysql.service'): -+ mysqlservice = 'mysql.service' -+ if not os.path.isfile('/usr/lib/systemd/system/' + mysqlservice): -+ # on Debian Jessie, systemctl restart mysql -+ mysqlservice = 'mysql' -+ -+ if os.path.isfile('/bin/systemctl'): -+ subprocess.call(['/bin/systemctl', 'restart', mysqlservice]) -+ elif os.path.isfile('/sbin/service'): -+ subprocess.call(['/sbin/service', 'mysqld', 'restart']) -+ elif os.path.isfile('/usr/sbin/service'): -+ subprocess.call(['/usr/sbin/service', 'mysql', 'restart']) -+ else: -+ log.error(_("Could not start the MySQL database service.")) -+ -+ if os.path.isfile('/bin/systemctl'): -+ subprocess.call(['/bin/systemctl', 'enable', mysqlservice]) -+ elif os.path.isfile('/sbin/chkconfig'): -+ subprocess.call(['/sbin/chkconfig', 'mysqld', 'on']) -+ elif os.path.isfile('/usr/sbin/update-rc.d'): -+ subprocess.call(['/usr/sbin/update-rc.d', 'mysql', 'defaults']) -+ else: -+ log.error( -+ _("Could not configure to start on boot, the MySQL database service.") -+ ) - -- log.info(_("Waiting for at most 30 seconds for 
MySQL/MariaDB to settle...")) -- max_wait = 30 -- while max_wait > 0: -- for socket_path in socket_paths: -- if os.path.exists(socket_path): -- max_wait = 0 -+ log.info(_("Waiting for at most 30 seconds for MySQL/MariaDB to settle...")) -+ max_wait = 30 -+ while max_wait > 0: -+ for socket_path in socket_paths: -+ if os.path.exists(socket_path): -+ max_wait = 0 - -- if max_wait > 0: -- max_wait = max_wait - 1 -- time.sleep(1) -+ if max_wait > 0: -+ max_wait = max_wait - 1 -+ time.sleep(1) - - options = { - 1: "Existing MySQL server (with root password already set).", -@@ -116,14 +125,17 @@ def execute(*args, **kw): # noqa: C901 - } - - answer = 0 -- if len([x for x in socket_paths if os.path.exists(x)]) > 0: -- if conf.mysqlserver: -- if conf.mysqlserver == 'existing': -- answer = 1 -- elif conf.mysqlserver == 'new': -- answer = 2 -- if answer == 0: -- answer = utils.ask_menu(_("What MySQL server are we setting up?"), options) -+ if conf.mysqlserver != 'existing': -+ if len([x for x in socket_paths if os.path.exists(x)]) > 0: -+ if conf.mysqlserver: -+ if conf.mysqlserver == 'existing': -+ answer = 1 -+ elif conf.mysqlserver == 'new': -+ answer = 2 -+ if answer == 0: -+ answer = utils.ask_menu(_("What MySQL server are we setting up?"), options) -+ else: -+ answer = 1 - - if answer == "1" or answer == 1: - if not conf.mysqlrootpw: -@@ -214,7 +226,8 @@ def execute(*args, **kw): # noqa: C901 - [mysql] - user=root - password='%s' --""" % (mysql_root_password) -+host=%s -+""" % (mysql_root_password, conf.mysqlhost) - - fp = open('/tmp/kolab-setup-my.cnf', 'w') - os.chmod('/tmp/kolab-setup-my.cnf', 600) -diff --git a/pykolab/setup/setup_roundcube.py b/pykolab/setup/setup_roundcube.py -index 1be5cb6..36c7aa7 100644 ---- a/pykolab/setup/setup_roundcube.py -+++ b/pykolab/setup/setup_roundcube.py -@@ -232,7 +232,8 @@ def execute(*args, **kw): - [mysql] - user=root - password='%s' --""" % (mysql_root_password) -+host=%s -+""" % (mysql_root_password, conf.mysqlhost) - - fp = open('/tmp/kolab-setup-my.cnf', 'w') - os.chmod('/tmp/kolab-setup-my.cnf', 600) -diff --git a/pykolab/setup/setup_syncroton.py b/pykolab/setup/setup_syncroton.py -index 5b9f915..446577f 100644 ---- a/pykolab/setup/setup_syncroton.py -+++ b/pykolab/setup/setup_syncroton.py -@@ -33,12 +33,20 @@ from pykolab.translate import _ - log = pykolab.getLogger('pykolab.setup') - conf = pykolab.getConf() - -+ - def __init__(): -- components.register('syncroton', execute, description=description(), after=['mysql','ldap','roundcube']) -+ components.register( -+ 'syncroton', -+ execute, -+ description=description(), -+ after=['mysql','ldap','roundcube'] -+ ) -+ - - def description(): - return _("Setup Syncroton.") - -+ - def execute(*args, **kw): - schema_files = [] - for root, directories, filenames in os.walk('/usr/share/doc/'): -@@ -71,7 +79,8 @@ def execute(*args, **kw): - [mysql] - user=root - password='%s' --""" % (mysql_root_password) -+host=%s -+""" % (mysql_root_password, conf.mysqlhost) - - fp = open('/tmp/kolab-setup-my.cnf', 'w') - os.chmod('/tmp/kolab-setup-my.cnf', 0600) --- -2.21.0 -
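The deleted patch above is the downstream 0001-Add-mysqlhost.patch: it added an --mysqlhost option (default 127.0.0.1) to setup-kolab, skipped starting and enabling a local MySQL/MariaDB service when an existing server is used, and wrote the chosen host into the temporary client configuration. The spec drops it in this revision, presumably because it is no longer needed with the 0.8.16 sources. Below is a minimal sketch of the my.cnf step only; the helper name is made up for illustration, and the mode is written as 0o600 rather than the patch's literal 600.

import os


def write_setup_mycnf(mysql_root_password, mysql_host='127.0.0.1'):
    # The same [mysql] client section the setup helpers render, now
    # including the host= line fed from --mysqlhost.
    contents = """
[mysql]
user=root
password='%s'
host=%s
""" % (mysql_root_password, mysql_host)

    path = '/tmp/kolab-setup-my.cnf'

    with open(path, 'w') as mycnf:
        mycnf.write(contents)

    # Restrict access: the file carries the MySQL root password.
    os.chmod(path, 0o600)

    return path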
debian.changelog
Changed
@@ -1,3 +1,9 @@
+pykolab (0.8.15-0~kolab2) unstable; urgency=low
+
+  * Fix init scripts for kolab-saslauthd and wallace
+
+ -- Daniel Hoffend <dh@dotlan.net>  Fri, 15 Nov 2019 01:35:13 +0100
+
 pykolab (0.8.15-0~kolab1) unstable; urgency=low
 
   * Release of version 0.8.15
debian.tar.gz/kolab-saslauthd.init
Changed
@@ -103,6 +103,7 @@
         --stop \
         --quiet \
         --retry=TERM/30/KILL/5 \
+        --user $USER \
         --startas $DAEMON \
         --pidfile $PIDFILE
     RETVAL="$?"
debian.tar.gz/kolab-server.init
Changed
@@ -91,6 +91,7 @@
         --retry=TERM/30/KILL/5 \
         --user $USER \
         --startas $DAEMON \
+        --pidfile $PIDFILE \
         --name $NAME
     RETVAL="$?"
     [ "$RETVAL" = 2 ] && return 2
debian.tar.gz/wallace.init
Changed
@@ -89,6 +89,7 @@
         --stop \
         --quiet \
         --retry=TERM/30/KILL/5 \
+        --user $USER \
         --startas $DAEMON \
         --pidfile $PIDFILE \
         --name $PROG_NAME
@@ -100,7 +101,7 @@
     # that waits for the process to drop all resources that could be
     # needed by services started subsequently. A last resort is to
     # sleep for some time.
-    start-stop-daemon --stop --quiet --oknodo --retry=0/30/KILL/5 --user $USER --exec $DAEMON
+    start-stop-daemon --stop --quiet --oknodo --retry=0/30/KILL/5 --exec $DAEMON
     [ "$?" = 2 ] && return 2
     # Many daemons don't delete their pidfiles when they exit.
     rm -f $PIDFILE
pykolab-0.8.15.tar.gz/.flake8 -> pykolab-0.8.16.tar.gz/.flake8
Changed
@@ -5,6 +5,8 @@
     docs/source/conf.py
 
 ignore =
+    # "Too complex"? Sure, for people that eat Hawaii pizza...
+    C901,
     # 'something' imported but unused
     F401,
     # 'from module import *' used: unable to detect undefined names
pykolab-0.8.15.tar.gz/.pylintrc -> pykolab-0.8.16.tar.gz/.pylintrc
Changed
@@ -1,11 +1,13 @@
 [MASTER]
 disable=
+    broad-except,
     cyclic-import,
     duplicate-code,
+    logging-not-lazy,
     missing-docstring,
     unused-argument,
     unused-wildcard-import,
     wildcard-import
 
 function-rgx=[a-z_][a-z0-9_]{2,90}$
-init-hook="import sys; sys.path.insert(0, './data/')"
+init-hook="import sys; sys.path.insert(0, '.')"
pykolab-0.8.15.tar.gz/conf/kolab.conf -> pykolab-0.8.16.tar.gz/conf/kolab.conf
Changed
@@ -167,6 +167,9 @@
 ; The URI to LDAP
 ldap_uri = ldap://localhost:389
 
+; A timeout, in seconds, for regular searches such as authentication requests.
+timeout = 10
+
 ; A list of integers containing supported controls, to increase the efficiency
 ; of individual short-lived connections with LDAP.
 supported_controls = 0,2,3
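The new [ldap] timeout key is consumed by the LDAP authentication code further down, which reads it with a default of 10 seconds, passes it to search_ext() and treats ldap.TIMEOUT as a failed login. The following is a self-contained sketch of that pattern with python-ldap; the connection, base DN and filter are placeholders, not pykolab API.

import ldap


def search_with_timeout(conn, base_dn, filterstr, timeout=10):
    # search_ext()/result3() honour the timeout and raise ldap.TIMEOUT
    # when the server does not answer in time.
    msgid = conn.search_ext(
        base_dn,
        ldap.SCOPE_SUBTREE,
        filterstr=filterstr,
        attrlist=['entrydn'],
        attrsonly=True,
        timeout=timeout
    )

    try:
        (rtype, rdata, rmsgid, rctrls) = conn.result3(msgid)
    except ldap.TIMEOUT:
        return None

    # Drop Active Directory referrals (entries with a None DN).
    return [dn for (dn, attrs) in rdata if dn is not None]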
pykolab-0.8.15.tar.gz/configure.ac -> pykolab-0.8.16.tar.gz/configure.ac
Changed
@@ -1,4 +1,4 @@
-AC_INIT([pykolab], 0.8.15)
+AC_INIT([pykolab], 0.8.16)
 AC_SUBST([RELEASE], 1)
 
 AC_CONFIG_SRCDIR(pykolab/constants.py.in)
pykolab-0.8.15.tar.gz/kolabd.py -> pykolab-0.8.16.tar.gz/kolabd.py
Changed
@@ -8,28 +8,33 @@
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
-
+#
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
-
+#
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
+from __future__ import print_function
+
+import os
 import sys
 
 # For development purposes
-sys.path = [ '.' ] + sys.path
+if os.path.isdir(os.path.join(os.path.dirname(__file__), '.git')):
+    sys.path.insert(0, '.')
 
 from pykolab.translate import _
 
+
 try:
     from pykolab.constants import *
-except ImportError, e:
-    print >> sys.stderr, _("Cannot load pykolab/constants.py:")
-    print >> sys.stderr, "%s" % e
+except ImportError as errmsg:
+    print(_("Cannot load pykolab/constants.py:"), file=sys.stderr)
+    print("%s" % (errmsg), file=sys.stderr)
     sys.exit(1)
 
 import kolabd
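kolabd.py, like the other entry points touched in this revision, switches to the print() function and the "except ... as" syntax, so the module parses on both Python 2.7 (via the __future__ import) and Python 3. A minimal sketch of the idiom follows; some_module stands in for pykolab.constants and is not a real name.

from __future__ import print_function

import sys

try:
    import some_module  # placeholder for pykolab.constants
except ImportError as errmsg:
    # Report the failure on stderr and bail out, as the entry point does.
    print("Cannot load some_module:", file=sys.stderr)
    print("%s" % (errmsg), file=sys.stderr)
    sys.exit(1)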
pykolab-0.8.15.tar.gz/kolabd/__init__.py -> pykolab-0.8.16.tar.gz/kolabd/__init__.py
Changed
@@ -20,6 +20,8 @@ The Kolab daemon. """ +from __future__ import print_function + import grp import os import pwd @@ -33,77 +35,77 @@ from pykolab.auth import Auth from pykolab import constants from pykolab import utils -from pykolab.translate import _ +from pykolab.translate import _ as _l -from process import KolabdProcess as Process +from .process import KolabdProcess as Process +# pylint: disable=invalid-name log = pykolab.getLogger('pykolab.daemon') conf = pykolab.getConf() -class KolabDaemon(object): +class KolabDaemon: def __init__(self): """ The main Kolab Groupware daemon process. """ - daemon_group = conf.add_cli_parser_option_group(_("Daemon Options")) + daemon_group = conf.add_cli_parser_option_group(_l("Daemon Options")) daemon_group.add_option( - "--fork", - dest = "fork_mode", - action = "store_true", - default = False, - help = _("Fork to the background.") - ) + "--fork", + dest="fork_mode", + action="store_true", + default=False, + help=_l("Fork to the background.") + ) daemon_group.add_option( - "-p", - "--pid-file", - dest = "pidfile", - action = "store", - default = "/var/run/kolabd/kolabd.pid", - help = _("Path to the PID file to use.") - ) + "-p", "--pid-file", + dest="pidfile", + action="store", + default="/var/run/kolabd/kolabd.pid", + help=_l("Path to the PID file to use.") + ) daemon_group.add_option( - "-u", - "--user", - dest = "process_username", - action = "store", - default = "kolab", - help = _("Run as user USERNAME"), - metavar = "USERNAME" - ) + "-u", "--user", + dest="process_username", + action="store", + default="kolab", + help=_l("Run as user USERNAME"), + metavar="USERNAME" + ) daemon_group.add_option( - "-g", - "--group", - dest = "process_groupname", - action = "store", - default = "kolab", - help = _("Run as group GROUPNAME"), - metavar = "GROUPNAME" - ) + "-g", "--group", + dest="process_groupname", + action="store", + default="kolab", + help=_l("Run as group GROUPNAME"), + metavar="GROUPNAME" + ) conf.finalize_conf() + # pylint: disable=too-many-branches + # pylint: disable=too-many-statements def run(self): """Run Forest, RUN!""" exitcode = 0 utils.ensure_directory( - os.path.dirname(conf.pidfile), - conf.process_username, - conf.process_groupname - ) + os.path.dirname(conf.pidfile), + conf.process_username, + conf.process_groupname + ) try: try: - (ruid, euid, suid) = os.getresuid() - (rgid, egid, sgid) = os.getresgid() - except AttributeError, errmsg: + (ruid, _, _) = os.getresuid() + (rgid, _, _) = os.getresgid() + except AttributeError: ruid = os.getuid() rgid = os.getgid() @@ -112,64 +114,47 @@ if rgid == 0: # Get group entry details try: - ( - group_name, - group_password, - group_gid, - group_members - ) = grp.getgrnam(conf.process_groupname) + (_, _, group_gid, _) = grp.getgrnam(conf.process_groupname) except KeyError: - print >> sys.stderr, _("Group %s does not exist") % ( - conf.process_groupname - ) + log.error( + _l("Group %s does not exist") % (conf.process_groupname) + ) sys.exit(1) # Set real and effective group if not the same as current. if not group_gid == rgid: log.debug( - _("Switching real and effective group id to %d") % ( - group_gid - ), - level=8 - ) + _l("Switching real and effective group id to %d") % (group_gid), + level=8 + ) os.setregid(group_gid, group_gid) if ruid == 0: # Means we haven't switched yet. 
try: - ( - user_name, - user_password, - user_uid, - user_gid, - user_gecos, - user_homedir, - user_shell - ) = pwd.getpwnam(conf.process_username) + (_, _, user_uid, _, _, _, _) = pwd.getpwnam(conf.process_username) except KeyError: - print >> sys.stderr, _("User %s does not exist") % ( - conf.process_username - ) + log.error( + _l("User %s does not exist") % (conf.process_username) + ) sys.exit(1) # Set real and effective user if not the same as current. if not user_uid == ruid: log.debug( - _("Switching real and effective user id to %d") % ( - user_uid - ), - level=8 - ) + _l("Switching real and effective user id to %d") % (user_uid), + level=8 + ) os.setreuid(user_uid, user_uid) - except: - log.error(_("Could not change real and effective uid and/or gid")) + except Exception: + log.error(_l("Could not change real and effective uid and/or gid")) try: pid = os.getpid() @@ -211,37 +196,36 @@ self.write_pid() self.do_sync() - except SystemExit, errcode: + except SystemExit as errcode: exitcode = errcode except KeyboardInterrupt: exitcode = 1 - log.info(_("Interrupted by user")) + log.info(_l("Interrupted by user")) - except AttributeError, errmsg: + except AttributeError: exitcode = 1 traceback.print_exc() - print >> sys.stderr, _("Traceback occurred, please report a " + - "bug at https://issues.kolab.org") + print(_l("Traceback occurred, please report a bug"), file=sys.stderr) - except TypeError, errmsg: + except TypeError as errmsg: exitcode = 1 traceback.print_exc() - log.error(_("Type Error: %s") % errmsg) + log.error(_l("Type Error: %s") % errmsg) - except: + except Exception: exitcode = 2 traceback.print_exc() - print >> sys.stderr, _("Traceback occurred, please report a " + - "bug at https://issues.kolab.org") + print(_l("Traceback occurred, please report a bug"), file=sys.stderr) sys.exit(exitcode) + # pylint: disable=no-self-use + # pylint: disable=too-many-branches + # pylint: disable=too-many-locals def do_sync(self): domain_auth = {} - pid = os.getpid() - primary_domain = conf.get('kolab', 'primary_domain') while 1: @@ -252,23 +236,23 @@ try: primary_auth.connect() connected = True - except Exception, errmsg: + except Exception as errmsg: connected = False - log.error(_("Could not connect to LDAP, is it running?")) + log.error(_l("Could not connect to LDAP, is it running?")) + log.error(_l("Error: %r") % (errmsg)) + log.error("Traceback: %r" % (traceback.format_exc())) time.sleep(5) - log.debug(_("Listing domains..."), level=5) - - start = time.time() + log.debug(_l("Listing domains..."), level=5) try: domains = primary_auth.list_domains() - except: + except Exception: time.sleep(60) continue - if isinstance(domains, list) and len(domains) < 1: - log.error(_("No domains. Not syncing")) + if not domains: + log.error(_l("No domains. 
Not syncing")) time.sleep(5) continue @@ -289,29 +273,32 @@ for primary in primaries: naming_context = primary_auth.domain_naming_context(primary) + + # pylint: disable=protected-access domain_root_dn = primary_auth._auth._kolab_domain_root_dn(primary) + log.debug( - _("Domain %r naming context: %r, root dn: %r") % ( - primary, - naming_context, - domain_root_dn - ), - level=8 - ) + _l("Domain %r naming context: %r, root dn: %r") % ( + primary, + naming_context, + domain_root_dn + ), + level=8 + ) domain_root_dns[primary] = domain_root_dn naming_contexts[primary] = naming_context log.debug( - _("Naming contexts to synchronize: %r") % ( - list(set(naming_contexts.values())) - ), - level=8 - ) + _l("Naming contexts to synchronize: %r") % ( + list(set(naming_contexts.values())) + ), + level=8 + ) # Find however many naming contexts we have, and what the # corresponding domain name is for them. - primary_domains = [x for x,y in naming_contexts.iteritems() if domain_root_dns[x] == y] + primary_domains = [x for x, y in naming_contexts.items() if domain_root_dns[x] == y] # Now we can check if any changes happened. added_domains = [] @@ -321,41 +308,44 @@ # accounted for locally. all_domains = list(set(primary_domains + domain_auth.keys())) - log.debug(_("Result set of domains: %r") % (all_domains), level=8) + log.debug(_l("Result set of domains: %r") % (all_domains), level=8) for domain in all_domains: - log.debug(_("Checking for domain %s") % (domain), level=8) + log.debug(_l("Checking for domain %s") % (domain), level=8) if domain in domain_auth.keys() and domain in primary_domains: if not domain_auth[domain].is_alive(): - log.debug(_("Domain %s isn't alive anymore.") % (domain), level=8) + log.debug(_l("Domain %s isn't alive anymore.") % (domain), level=8) domain_auth[domain].terminate() added_domains.append(domain) else: - log.debug(_("Domain %s already there and alive.") % (domain), level=8) + log.debug(_l("Domain %s already there and alive.") % (domain), level=8) continue elif domain in domain_auth.keys(): - log.debug(_("Domain %s should not exist any longer.") % (domain), level=8) + log.debug(_l("Domain %s should not exist any longer.") % (domain), level=8) removed_domains.append(domain) else: - log.debug(_("Domain %s does not have a process yet.") % (domain), level=8) + log.debug(_l("Domain %s does not have a process yet.") % (domain), level=8) added_domains.append(domain) - if len(removed_domains) == 0 and len(added_domains) == 0: + if not removed_domains and not added_domains: try: - sleep_between_domain_operations_in_seconds = (float)(conf.get('kolab', 'domain_sync_interval')) + sleep_between_domain_operations_in_seconds = (float)( + conf.get( + 'kolab', + 'domain_sync_interval' + ) + ) + time.sleep(sleep_between_domain_operations_in_seconds) except ValueError: time.sleep(600) log.debug( - _("added domains: %r, removed domains: %r") % ( - added_domains, - removed_domains - ), - level=8 - ) + _l("added domains: %r, removed domains: %r") % (added_domains, removed_domains), + level=8 + ) for domain in added_domains: domain_auth[domain] = Process(domain) @@ -383,7 +373,7 @@ if os.access(conf.pidfile, os.R_OK): try: os.remove(conf.pidfile) - except: + except Exception: pass raise SystemExit
pykolab-0.8.15.tar.gz/kolabd/process.py -> pykolab-0.8.16.tar.gz/kolabd/process.py
Changed
@@ -17,13 +17,13 @@
 #
 
 import multiprocessing
-import os
 import time
 
 import pykolab
 
 from pykolab.auth import Auth
 from pykolab.translate import _
 
+# pylint: disable=invalid-name
 log = pykolab.getLogger('pykolab.daemon')
 conf = pykolab.getConf()
@@ -33,11 +33,11 @@
         self.domain = domain
         log.debug(_("Process created for domain %s") % (domain), level=8)
         multiprocessing.Process.__init__(
-                self,
-                target=self.synchronize,
-                args=(domain,),
-                name="Kolab(%s)" % domain
-            )
+            self,
+            target=self.synchronize,
+            args=(domain,),
+            name="Kolab(%s)" % domain
+        )
 
     def synchronize(self, domain):
         log.debug(_("Synchronizing for domain %s") % (domain), level=8)
@@ -56,7 +56,7 @@
                 time.sleep(sync_interval)
             except KeyboardInterrupt:
                 break
-            except Exception, errmsg:
+            except Exception as errmsg:
                 log.error(_("Error in process %r, terminating:\n\t%r") % (self.name, errmsg))
                 import traceback
                 traceback.print_exc()
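kolabd/process.py keeps its structure: one multiprocessing.Process subclass per synchronised domain, with the process targeting its own synchronize() method; the diff only modernises the except clause, the argument indentation and a pylint hint. Below is a stripped-down sketch of the pattern with placeholder work instead of the real per-domain synchronisation.

import multiprocessing
import time


class DomainProcess(multiprocessing.Process):
    """One child process per Kolab domain (illustrative only)."""

    def __init__(self, domain, interval=1):
        self.domain = domain
        self.interval = interval
        multiprocessing.Process.__init__(
            self,
            target=self.synchronize,
            args=(domain,),
            name="Kolab(%s)" % domain
        )

    def synchronize(self, domain):
        try:
            while True:
                # The real worker connects to LDAP and syncs the domain here.
                time.sleep(self.interval)
        except KeyboardInterrupt:
            pass


if __name__ == '__main__':
    worker = DomainProcess('example.org')
    worker.start()
    time.sleep(2)
    worker.terminate()
    worker.join()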
pykolab-0.8.15.tar.gz/pykolab/auth/__init__.py -> pykolab-0.8.16.tar.gz/pykolab/auth/__init__.py
Changed
@@ -21,14 +21,17 @@ import time import pykolab -import pykolab.base +from pykolab.base import Base from pykolab.translate import _ +# pylint: disable=invalid-name log = pykolab.getLogger('pykolab.auth') conf = pykolab.getConf() -class Auth(pykolab.base.Base): + +# pylint: disable=too-many-public-methods +class Auth(Base): """ This is the Authentication and Authorization module for PyKolab. """ @@ -37,7 +40,7 @@ """ Initialize the authentication class. """ - pykolab.base.Base.__init__(self, domain=domain) + Base.__init__(self, domain=domain) self._auth = None @@ -103,59 +106,59 @@ section = domain log.debug( - _("Using section %s and domain %s") % (section,domain), - level=8 - ) + _("Using section %s and domain %s") % (section, domain), + level=8 + ) - if not self.domains == None and self.domains.has_key(domain): + if self.domains is not None and domain in self.domains: section = self.domains[domain] domain = self.domains[domain] log.debug( - _("Using section %s and domain %s") % (section,domain), - level=8 - ) + _("Using section %s and domain %s") % (section, domain), + level=8 + ) log.debug( - _("Connecting to Authentication backend for domain %s") % ( - domain - ), - level=8 - ) + _("Connecting to Authentication backend for domain %s") % ( + domain + ), + level=8 + ) if not conf.has_section(section): section = 'kolab' if not conf.has_option(section, 'auth_mechanism'): log.debug( - _("Section %s has no option 'auth_mechanism'") % (section), - level=8 - ) + _("Section %s has no option 'auth_mechanism'") % (section), + level=8 + ) section = 'kolab' else: log.debug( - _("Section %s has auth_mechanism: %r") % ( - section, - conf.get(section,'auth_mechanism') - ), - level=8 - ) + _("Section %s has auth_mechanism: %r") % ( + section, + conf.get(section, 'auth_mechanism') + ), + level=8 + ) # Get the actual authentication and authorization backend. if conf.get(section, 'auth_mechanism') == 'ldap': log.debug(_("Starting LDAP..."), level=8) - from pykolab.auth import ldap - self._auth = ldap.LDAP(self.domain) + from pykolab.auth.ldap import LDAP + self._auth = LDAP(self.domain) - elif conf.get(section, 'auth_mechanism') == 'sql': - from pykolab.auth import sql - self._auth = sql.SQL(self.domain) + # elif conf.get(section, 'auth_mechanism') == 'sql': + # from .sql import SQL + # self._auth = SQL(self.domain) else: log.debug(_("Starting LDAP..."), level=8) - from pykolab.auth import ldap - self._auth = ldap.LDAP(self.domain) + from pykolab.auth.ldap import LDAP + self._auth = LDAP(self.domain) self._auth.connect() @@ -165,13 +168,10 @@ back to the primary domain specified by the configuration. """ - if domain == None: - section = 'kolab' + if domain is None: domain = conf.get('kolab', 'primary_domain') - else: - section = domain - if not self._auth or self._auth == None: + if not self._auth: return self._auth._disconnect()
pykolab-0.8.15.tar.gz/pykolab/auth/ldap/__init__.py -> pykolab-0.8.16.tar.gz/pykolab/auth/ldap/__init__.py
Changed
@@ -6,103 +6,57 @@ # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. - +# # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. - +# # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # +# pylint: disable=too-many-lines + +from __future__ import print_function import datetime -import _ldap -import ldap -import ldap.async -import ldap.controls -import ldap.filter +# Catch python-ldap-2.4 changes +from distutils import version import logging import time import traceback -import pykolab -import pykolab.base - -from pykolab import utils -from pykolab.constants import * -from pykolab.errors import * -from pykolab.translate import _ - -log = pykolab.getLogger('pykolab.auth') -conf = pykolab.getConf() - -import auth_cache -import cache - -# Catch python-ldap-2.4 changes -from distutils import version - -if version.StrictVersion('2.4.0') <= version.StrictVersion(ldap.__version__): - LDAP_CONTROL_PAGED_RESULTS = ldap.CONTROL_PAGEDRESULTS -else: - LDAP_CONTROL_PAGED_RESULTS = ldap.LDAP_CONTROL_PAGE_OID - +import ldap +import ldap.controls try: from ldap.controls import psearch -except: - log.warning(_("Python LDAP library does not support persistent search")) +except ImportError: + pass -class SimplePagedResultsControl(ldap.controls.SimplePagedResultsControl): - """ +from ldap.dn import explode_dn - Python LDAP 2.4 and later breaks the API. This is an abstraction class - so that we can handle either. - """ +import ldap.filter - def __init__(self, page_size=0, cookie=''): - if version.StrictVersion( - '2.4.0' - ) <= version.StrictVersion( - ldap.__version__ - ): +from six import string_types +import _ldap - ldap.controls.SimplePagedResultsControl.__init__( - self, - size=page_size, - cookie=cookie - ) - else: - ldap.controls.SimplePagedResultsControl.__init__( - self, - LDAP_CONTROL_PAGED_RESULTS, - True, - (page_size, '') - ) +import pykolab - def cookie(self): - if version.StrictVersion( - '2.4.0' - ) <= version.StrictVersion( - ldap.__version__ - ): +from pykolab import utils +from pykolab.base import Base +from pykolab.constants import SUPPORTED_LDAP_CONTROLS +from pykolab.errors import * +from pykolab.translate import _ as _l - return self.cookie - else: - return self.controlValue[1] +import auth_cache +import cache - def size(self): - if version.StrictVersion( - '2.4.0' - ) <= version.StrictVersion( - ldap.__version__ - ): +# pylint: disable=invalid-name +log = pykolab.getLogger('pykolab.auth') +conf = pykolab.getConf() - return self.size - else: - return self.controlValue[0] -class LDAP(pykolab.base.Base): +class LDAP(Base): """ Abstraction layer for the LDAP authentication / authorization backend, for use with Kolab. @@ -113,17 +67,21 @@ Initialize the LDAP object for domain. If no domain is specified, domain name space configured as 'kolab'.'primary_domain' is used. 
""" - pykolab.base.Base.__init__(self, domain=domain) + Base.__init__(self, domain=domain) self.ldap = None self.ldap_priv = None self.bind = None - if domain == None: + if domain is None: self.domain = conf.get('kolab', 'primary_domain') else: self.domain = domain + # pylint: disable=too-many-branches + # pylint: disable=too-many-locals + # pylint: disable=too-many-return-statements + # pylint: disable=too-many-statements def authenticate(self, login, realm): """ Find the entry corresponding to login, and attempt a bind. @@ -144,13 +102,14 @@ try: log.debug( - _("Attempting to authenticate user %s in realm %s") % ( - login[0], - realm - ), - level=8 - ) - except: + _l("Attempting to authenticate user %s in realm %s") % ( + login[0], + realm + ), + level=8 + ) + + except Exception: pass self.connect(immediate=True) @@ -161,31 +120,29 @@ try: base_dn = auth_cache.get_entry(self.domain) - except Exception, errmsg: - log.error(_("Authentication cache failed: %r") % (errmsg)) - pass + except Exception as errmsg: + log.error(_l("Authentication cache failed: %r") % (errmsg)) - if base_dn == None: + if base_dn is None: config_base_dn = self.config_get('base_dn') ldap_base_dn = self._kolab_domain_root_dn(self.domain) - if not ldap_base_dn == None and not ldap_base_dn == config_base_dn: + if ldap_base_dn is not None and not ldap_base_dn == config_base_dn: base_dn = ldap_base_dn else: base_dn = config_base_dn try: auth_cache.set_entry(self.domain, base_dn) - except Exception, errmsg: - log.error(_("Authentication cache failed: %r") % (errmsg)) - pass + except Exception as errmsg: + log.error(_l("Authentication cache failed: %r") % (errmsg)) try: user_filter = self.config_get_raw('user_filter') % ( - {'base_dn': base_dn} - ) + {'base_dn': base_dn} + ) - except TypeError, errmsg: + except TypeError: user_filter = self.config_get_raw('user_filter') _filter = '(&(|' @@ -203,82 +160,90 @@ # Attempt to obtain an entry_dn from cache. 
try: entry_dn = auth_cache.get_entry(_filter) - except Exception, errmsg: - log.error(_("Authentication cache failed: %r") % (errmsg)) - pass + except Exception as errmsg: + log.error(_l("Authentication cache failed: %r") % (errmsg)) retval = False + timeout = self.config_get('ldap', 'timeout', default=10) if entry_dn is None: _search = self.ldap.search_ext( - base_dn, - ldap.SCOPE_SUBTREE, - _filter, - ['entrydn'] - ) + base_dn, + ldap.SCOPE_SUBTREE, + filterstr=_filter, + attrlist=['entrydn'], + attrsonly=True, + timeout=timeout + ) try: ( - _result_type, - _result_data, - _result_msgid, - _result_controls - ) = self.ldap.result3(_search) - - except ldap.SERVER_DOWN, errmsg: - log.error(_("LDAP server unavailable: %r") % (errmsg)) - log.error(_("%s") % (traceback.format_exc())) - self._disconnect() + _result_type, + _result_data, + _result_msgid, + _result_controls + ) = self.ldap.result3(_search) - return False - - except ldap.NO_SUCH_OBJECT: + except ldap.INVALID_CREDENTIALS: log.error( - _("Invalid DN, username and/or password for '%s'.") % ( - bind_dn + _l("Invalid DN, username and/or password for '%s'.") % ( + _filter ) ) return False - except ldap.INVALID_CREDENTIALS: + except ldap.NO_SUCH_OBJECT: log.error( - _("Invalid DN, username and/or password for '%s'.") % ( - bind_dn + _l("Invalid DN, username and/or password for '%s'.") % ( + _filter ) ) return False - except Exception, errmsg: - log.error(_("Exception occurred: %r") % (errmsg)) - log.error(_("%s") % (traceback.format_exc())) + except ldap.SERVER_DOWN as errmsg: + log.error(_l("LDAP server unavailable: %r") % (errmsg)) + log.error(traceback.format_exc()) + self._disconnect() + + return False + + except ldap.TIMEOUT: + log.error(_l("LDAP timeout.")) + self._disconnect() + + return False + + except Exception as errmsg: + log.error(_l("Exception occurred: %r") % (errmsg)) + log.error(traceback.format_exc()) self._disconnect() return False log.debug( - _("Length of entries found: %r") % ( - len(_result_data) - ), - level=8 - ) + _l("Length of entries found: %r") % ( + len(_result_data) + ), + level=8 + ) # Remove referrals _result_data = [_e for _e in _result_data if _e[0] is not None] if len(_result_data) == 1: - (entry_dn, entry_attrs) = _result_data[0] + (entry_dn, _) = _result_data[0] elif len(_result_data) > 1: try: log.info( - _("Authentication for %r failed " + - "(multiple entries)") % ( - login[0] - ) + _l("Authentication for %r failed (multiple entries)") % ( + login[0] ) - except: + ) + + except Exception: pass self._disconnect() @@ -287,11 +252,12 @@ else: try: log.info( - _("Authentication for %r failed (no entry)") % ( - login[0] - ) + _l("Authentication for %r failed (no entry)") % ( + login[0] ) - except: + ) + + except Exception: pass self._disconnect() @@ -300,11 +266,12 @@ if entry_dn is None: try: log.info( - _("Authentication for %r failed (LDAP error?)") % ( - login[0] - ) + _l("Authentication for %r failed (LDAP error?)") % ( + login[0] ) - except: + ) + + except Exception: pass self._disconnect() @@ -318,22 +285,23 @@ if retval: try: log.info( - _("Authentication for %r succeeded") % ( - login[0] - ) + _l("Authentication for %r succeeded") % ( + login[0] ) + ) - except: + except Exception: pass else: try: log.info( - _("Authentication for %r failed (error)") % ( - login[0] - ) + _l("Authentication for %r failed (error)") % ( + login[0] ) - except: + ) + + except Exception: pass self._disconnect() @@ -341,25 +309,25 @@ try: auth_cache.set_entry(_filter, entry_dn) - except Exception, errmsg: - 
log.error(_("Authentication cache failed: %r") % (errmsg)) - pass + except Exception as errmsg: + log.error(_l("Authentication cache failed: %r") % (errmsg)) - except ldap.SERVER_DOWN, errmsg: - log.error(_("Authentication failed, LDAP server unavailable")) + except ldap.SERVER_DOWN: + log.error(_l("Authentication failed, LDAP server unavailable")) self._disconnect() return False - except Exception, errmsg: + except Exception: try: log.debug( - _("Failed to authenticate as user %r") % ( - login[0] - ), - level=8 - ) - except: + _l("Failed to authenticate as user %r") % ( + login[0] + ), + level=8 + ) + + except Exception: pass self._disconnect() @@ -372,48 +340,47 @@ retval = self._bind(entry_dn, login[1]) if retval: - log.info(_("Authentication for %r succeeded") % (login[0])) + log.info(_l("Authentication for %r succeeded") % (login[0])) else: log.info( - _("Authentication for %r failed (password)") % ( - login[0] - ) + _l("Authentication for %r failed (password)") % ( + login[0] ) + ) self._disconnect() return False - except ldap.NO_SUCH_OBJECT, errmsg: + except ldap.NO_SUCH_OBJECT as errmsg: log.debug( - _("Error occured, there is no such object: %r") % ( - errmsg - ), - level=8 - ) + _l("Error occured, there is no such object: %r") % ( + errmsg + ), + level=8 + ) self.bind = None try: auth_cache.del_entry(_filter) - except: - log.error(_("Authentication cache failed to clear entry")) - pass + except Exception: + log.error(_l("Authentication cache failed to clear entry")) retval = self.authenticate(login, realm) - except Exception, errmsg: - log.debug(_("Exception occured: %r") %(errmsg)) + except Exception as errmsg: + log.debug(_l("Exception occured: %r") % (errmsg)) try: log.debug( - _("Failed to authenticate as user %r") % ( - login[0] - ), - level=8 - ) + _l("Failed to authenticate as user %r") % ( + login[0] + ), + level=8 + ) - except: + except Exception: pass self._disconnect() @@ -435,11 +402,11 @@ if priv is not None and self.ldap_priv is not None: return - log.debug(_("Connecting to LDAP..."), level=8) + log.debug(_l("Connecting to LDAP..."), level=8) uri = self.config_get('ldap_uri') - log.debug(_("Attempting to use LDAP URI %s") % (uri), level=8) + log.debug(_l("Attempting to use LDAP URI %s") % (uri), level=8) trace_level = 0 @@ -454,12 +421,12 @@ retry_delay = 3.0 conn = ldap.ldapobject.ReconnectLDAPObject( - uri, - trace_level=trace_level, - trace_file=pykolab.logger.StderrToLogger(log), - retry_max=retry_max, - retry_delay=retry_delay - ) + uri, + trace_level=trace_level, + trace_file=pykolab.logger.StderrToLogger(log), + retry_max=retry_max, + retry_delay=retry_delay + ) if immediate: conn.set_option(ldap.OPT_TIMEOUT, 10) @@ -495,7 +462,7 @@ config_base_dn = self.config_get('base_dn') ldap_base_dn = self._kolab_domain_root_dn(self.domain) - if not ldap_base_dn == None and not ldap_base_dn == config_base_dn: + if ldap_base_dn is not None and not ldap_base_dn == config_base_dn: base_dn = ldap_base_dn else: base_dn = config_base_dn @@ -503,21 +470,21 @@ _filter = "(%s=%s)" % (unique_attribute, ldap.filter.escape_filter_chars(entry_id)) _search = self.ldap.search_ext( - base_dn, - ldap.SCOPE_SUBTREE, - _filter, - ['entrydn'] - ) + base_dn, + ldap.SCOPE_SUBTREE, + _filter, + ['entrydn'] + ) ( - _result_type, - _result_data, - _result_msgid, - _result_controls - ) = self.ldap.result3(_search) + _result_type, + _result_data, + _result_msgid, + _result_controls + ) = self.ldap.result3(_search) if len(_result_data) >= 1: - (entry_dn, entry_attrs) = _result_data[0] + (entry_dn, 
_) = _result_data[0] return entry_dn @@ -530,12 +497,13 @@ entry_attrs = self.get_entry_attributes(entry_id, [attribute]) - if entry_attrs.has_key(attribute): + if attribute in entry_attrs: return entry_attrs[attribute] - elif entry_attrs.has_key(attribute.lower()): + + if attribute.lower() in entry_attrs: return entry_attrs[attribute.lower()] - else: - return None + + return None def get_entry_attributes(self, entry_id, attributes): """ @@ -544,32 +512,32 @@ self._bind() - log.debug(_("Entry ID: %r") % (entry_id), level=8) + log.debug(_l("Entry ID: %r") % (entry_id), level=8) entry_dn = self.entry_dn(entry_id) - log.debug(_("Entry DN: %r") % (entry_dn), level=8) + log.debug(_l("Entry DN: %r") % (entry_dn), level=8) log.debug( - _("ldap search: (%r, %r, filterstr='(objectclass=*)', attrlist=[ 'dn' ] + %r") % ( - entry_dn, - ldap.SCOPE_BASE, - attributes - ), - level=8 - ) - - _search = self.ldap.search_ext( + _l("ldap search: (%r, %r, filterstr='(objectclass=*)', attrlist=[ 'dn' ] + %r") % ( entry_dn, ldap.SCOPE_BASE, - filterstr='(objectclass=*)', - attrlist=[ 'dn' ] + attributes - ) + attributes + ), + level=8 + ) + + _search = self.ldap.search_ext( + entry_dn, + ldap.SCOPE_BASE, + filterstr='(objectclass=*)', + attrlist=['dn'] + attributes + ) ( - _result_type, - _result_data, - _result_msgid, - _result_controls - ) = self.ldap.result3(_search) + _result_type, + _result_data, + _result_msgid, + _result_controls + ) = self.ldap.result3(_search) if len(_result_data) >= 1: (_entry_dn, _entry_attrs) = _result_data[0] @@ -588,6 +556,7 @@ return self.extract_recipient_addresses(entry) if entry is not None else [] + # pylint: disable=no-self-use def extract_recipient_addresses(self, entry): """ Extact a list of all valid recipient addresses for the given LDAP entry. 
@@ -597,10 +566,10 @@ mail_attributes = conf.get_list('ldap', 'mail_attributes') for attr in mail_attributes: - if entry.has_key(attr): + if attr in entry: if isinstance(entry[attr], list): recipient_addresses.extend(entry[attr]) - elif isinstance(entry[attr], basestring): + elif isinstance(entry[attr], string_types): recipient_addresses.append(entry[attr]) return recipient_addresses @@ -612,13 +581,18 @@ delegators = [] mailbox_attribute = conf.get('cyrus-sasl', 'result_attribute') - if mailbox_attribute == None: + if mailbox_attribute is None: mailbox_attribute = 'mail' for __delegator in self.search_entry_by_attribute('kolabDelegate', entry_id): (_dn, _delegator) = __delegator - _delegator['dn'] = _dn; - _delegator['_mailbox_basename'] = _delegator[mailbox_attribute] if _delegator.has_key(mailbox_attribute) else None + _delegator['dn'] = _dn + + if mailbox_attribute in _delegator: + _delegator['_mailbox_basename'] = _delegator[mailbox_attribute] + else: + _delegator['_mailbox_basename'] = None + if isinstance(_delegator['_mailbox_basename'], list): _delegator['_mailbox_basename'] = _delegator['_mailbox_basename'][0] delegators.append(_delegator) @@ -635,18 +609,18 @@ self._bind() - if not exclude_entry_id == None: + if exclude_entry_id is not None: __filter_prefix = "(&" __filter_suffix = "(!(%s=%s)))" % ( - self.config_get('unique_attribute'), - exclude_entry_id - ) + self.config_get('unique_attribute'), + exclude_entry_id + ) else: __filter_prefix = "" __filter_suffix = "" resource_filter = self.config_get('resource_filter') - if not resource_filter == None: + if resource_filter is not None: __filter_prefix = "(&%s" % resource_filter __filter_suffix = ")" @@ -658,7 +632,7 @@ _filter = "(|" - if isinstance(folder, basestring): + if isinstance(folder, string_types): _filter += "(kolabTargetFolder=%s)" % (folder) else: for _folder in folder: @@ -666,9 +640,9 @@ _filter += ")" - _filter = "%s%s%s" % (__filter_prefix,_filter,__filter_suffix) + _filter = "%s%s%s" % (__filter_prefix, _filter, __filter_suffix) - log.debug(_("Finding resource with filter %r") % (_filter), level=8) + log.debug(_l("Finding resource with filter %r") % (_filter), level=8) if len(_filter) <= 6: return None @@ -676,18 +650,18 @@ config_base_dn = self.config_get('resource_base_dn') ldap_base_dn = self._kolab_domain_root_dn(self.domain) - if not ldap_base_dn == None and not ldap_base_dn == config_base_dn: + if ldap_base_dn is not None and not ldap_base_dn == config_base_dn: resource_base_dn = ldap_base_dn else: resource_base_dn = config_base_dn _results = self.ldap.search_s( - resource_base_dn, - scope=ldap.SCOPE_SUBTREE, - filterstr=_filter, - attrlist=result_attributes, - attrsonly=True - ) + resource_base_dn, + scope=ldap.SCOPE_SUBTREE, + filterstr=_filter, + attrlist=result_attributes, + attrsonly=True + ) _entry_dns = [] @@ -714,19 +688,17 @@ self._bind() - if not exclude_entry_id == None: + if exclude_entry_id is not None: __filter_prefix = "(&" __filter_suffix = "(!(%s=%s)))" % ( - self.config_get('unique_attribute'), - ldap.filter.escape_filter_chars(exclude_entry_id) - ) + self.config_get('unique_attribute'), + ldap.filter.escape_filter_chars(exclude_entry_id) + ) else: __filter_prefix = "" __filter_suffix = "" - kolab_filter = self._kolab_filter() - if search_attrs is not None: recipient_address_attrs = search_attrs else: @@ -738,7 +710,7 @@ _filter = "(|" for recipient_address_attr in recipient_address_attrs: - if isinstance(address, basestring): + if isinstance(address, string_types): _filter += "(%s=%s)" 
% (recipient_address_attr, address) else: for _address in address: @@ -746,9 +718,9 @@ _filter += ")" - _filter = "%s%s%s" % (__filter_prefix,_filter,__filter_suffix) + _filter = "%s%s%s" % (__filter_prefix, _filter, __filter_suffix) - log.debug(_("Finding recipient with filter %r") % (_filter), level=8) + log.debug(_l("Finding recipient with filter %r") % (_filter), level=8) if len(_filter) <= 6: return None @@ -756,18 +728,18 @@ config_base_dn = self.config_get('base_dn') ldap_base_dn = self._kolab_domain_root_dn(self.domain) - if not ldap_base_dn == None and not ldap_base_dn == config_base_dn: + if ldap_base_dn is not None and not ldap_base_dn == config_base_dn: base_dn = ldap_base_dn else: base_dn = config_base_dn _results = self.ldap.search_s( - base_dn, - scope=ldap.SCOPE_SUBTREE, - filterstr=_filter, - attrlist=result_attributes, - attrsonly=True - ) + base_dn, + scope=ldap.SCOPE_SUBTREE, + filterstr=_filter, + attrlist=result_attributes, + attrsonly=True + ) _entry_dns = [] @@ -775,7 +747,7 @@ (_entry_id, _entry_attrs) = _result # Prevent Active Directory referrals - if not _entry_id == None: + if _entry_id is not None: _entry_dns.append(_entry_id) return _entry_dns @@ -790,19 +762,19 @@ self._bind() - if not exclude_entry_id == None: + if exclude_entry_id is not None: __filter_prefix = "(&" __filter_suffix = "(!(%s=%s)))" % ( - self.config_get('unique_attribute'), - ldap.filter.escape_filter_chars(exclude_entry_id) - ) + self.config_get('unique_attribute'), + ldap.filter.escape_filter_chars(exclude_entry_id) + ) else: __filter_prefix = "" __filter_suffix = "" resource_filter = self.config_get('resource_filter') - if not resource_filter == None: + if resource_filter is not None: __filter_prefix = "(&%s" % resource_filter __filter_suffix = ")" @@ -814,7 +786,7 @@ _filter = "(|" for recipient_address_attr in recipient_address_attrs: - if isinstance(address, basestring): + if isinstance(address, string_types): _filter += "(%s=%s)" % (recipient_address_attr, address) else: for _address in address: @@ -822,9 +794,9 @@ _filter += ")" - _filter = "%s%s%s" % (__filter_prefix,_filter,__filter_suffix) + _filter = "%s%s%s" % (__filter_prefix, _filter, __filter_suffix) - log.debug(_("Finding resource with filter %r") % (_filter), level=8) + log.debug(_l("Finding resource with filter %r") % (_filter), level=8) if len(_filter) <= 6: return None @@ -832,18 +804,18 @@ config_base_dn = self.config_get('resource_base_dn') ldap_base_dn = self._kolab_domain_root_dn(self.domain) - if not ldap_base_dn == None and not ldap_base_dn == config_base_dn: + if ldap_base_dn is not None and not ldap_base_dn == config_base_dn: resource_base_dn = ldap_base_dn else: resource_base_dn = config_base_dn _results = self.ldap.search_s( - resource_base_dn, - scope=ldap.SCOPE_SUBTREE, - filterstr=_filter, - attrlist=result_attributes, - attrsonly=True - ) + resource_base_dn, + scope=ldap.SCOPE_SUBTREE, + filterstr=_filter, + attrlist=result_attributes, + attrsonly=True + ) # Remove referrals _entry_dns = [_e[0] for _e in _results if _e[0] is not None] @@ -852,17 +824,17 @@ def get_latest_sync_timestamp(self): timestamp = cache.last_modify_timestamp(self.domain) - log.debug(_("Using timestamp %r") % (timestamp), level=8) + log.debug(_l("Using timestamp %r") % (timestamp), level=8) return timestamp def list_secondary_domains(self): """ List alias domain name spaces for the current domain name space. 
""" - if not self.domains == None: - return [s for s in self.domains.keys() if not s in self.domains.values()] - else: - return [] + if self.domains is not None: + return [s for s in self.domains.keys() if s not in self.domains.values()] + + return [] def recipient_policy(self, entry): """ @@ -886,20 +858,19 @@ secondary_mail_attribute = mail_attributes[1] daemon_rcpt_policy = self.config_get('daemon_rcpt_policy') - if not utils.true_or_false(daemon_rcpt_policy) and not daemon_rcpt_policy == None: + if not utils.true_or_false(daemon_rcpt_policy) and daemon_rcpt_policy is not None: log.debug( - _( - "Not applying recipient policy for %s " + \ - "(disabled through configuration)" - ) % (entry_dn), - level=1 - ) + _l("Not applying recipient policy for %s (disabled through configuration)") % ( + entry_dn + ), + level=1 + ) return entry_modifications want_attrs = [] - log.debug(_("Applying recipient policy to %r") % (entry_dn), level=8) + log.debug(_l("Applying recipient policy to %r") % (entry_dn), level=8) # See which mail attributes we would want to control. # @@ -907,79 +878,81 @@ # 'alias' and 'mailalternateaddress' are considered for secondary mail. # primary_mail = self.config_get_raw('%s_primary_mail' % (entry_type)) - if primary_mail == None and entry_type == 'user': + if primary_mail is None and entry_type == 'user': primary_mail = self.config_get_raw('primary_mail') - if not secondary_mail_attribute == None: + if secondary_mail_attribute is not None: secondary_mail = self.config_get_raw('%s_secondary_mail' % (entry_type)) - if secondary_mail == None and entry_type == 'user': + if secondary_mail is None and entry_type == 'user': secondary_mail = self.config_get_raw('secondary_mail') log.debug( - _("Using mail attributes: %r, with primary %r and " + \ - "secondary %r") % ( - mail_attributes, - primary_mail_attribute, - secondary_mail_attribute - ), - level=8 - ) + _l("Using mail attributes: %r, with primary %r and secondary %r") % ( + mail_attributes, + primary_mail_attribute, + secondary_mail_attribute + ), + level=8 + ) for _mail_attr in mail_attributes: - if not entry.has_key(_mail_attr): - log.debug(_("key %r not in entry") % (_mail_attr), level=8) + if _mail_attr not in entry: + log.debug(_l("key %r not in entry") % (_mail_attr), level=8) if _mail_attr == primary_mail_attribute: - log.debug(_("key %r is the prim. mail attr.") % (_mail_attr), level=8) - if not primary_mail == None: - log.debug(_("prim. mail pol. is not empty"), level=8) + log.debug(_l("key %r is the prim. mail attr.") % (_mail_attr), level=8) + if primary_mail is not None: + log.debug(_l("prim. mail pol. is not empty"), level=8) want_attrs.append(_mail_attr) elif _mail_attr == secondary_mail_attribute: - log.debug(_("key %r is the sec. mail attr.") % (_mail_attr), level=8) - if not secondary_mail == None: - log.debug(_("sec. mail pol. is not empty"), level=8) + log.debug(_l("key %r is the sec. mail attr.") % (_mail_attr), level=8) + if secondary_mail is not None: + log.debug(_l("sec. mail pol. is not empty"), level=8) want_attrs.append(_mail_attr) - if len(want_attrs) > 0: - log.debug(_("Attributes %r are not yet available for entry %r") % ( - want_attrs, - entry_dn - ), - level=8 - ) + if want_attrs: + log.debug( + _l("Attributes %r are not yet available for entry %r") % ( + want_attrs, + entry_dn + ), + level=8 + ) # Also append the preferredlanguage or 'native tongue' configured # for the entry. 
- if not entry.has_key('preferredlanguage'): + if 'preferredlanguage' not in entry: want_attrs.append('preferredlanguage') # If we wanted anything, now is the time to get it. - if len(want_attrs) > 0: - log.debug(_("Attributes %r are not yet available for entry %r") % ( - want_attrs, - entry_dn - ), - level=8 - ) + if want_attrs: + log.debug( + _l("Attributes %r are not yet available for entry %r") % ( + want_attrs, + entry_dn + ), + level=8 + ) + attributes = self.get_entry_attributes(entry_dn, want_attrs) for attribute in attributes.keys(): entry[attribute] = attributes[attribute] - if not entry.has_key('preferredlanguage'): + if 'preferredlanguage' not in entry: entry['preferredlanguage'] = conf.get('kolab', 'default_locale') # Primary mail address - if not primary_mail == None: + if primary_mail is not None: primary_mail_address = conf.plugins.exec_hook( - "set_primary_mail", - kw={ - 'primary_mail': primary_mail, - 'entry': entry, - 'primary_domain': self.domain - } - ) + "set_primary_mail", + kw={ + 'primary_mail': primary_mail, + 'entry': entry, + 'primary_domain': self.domain + } + ) - if primary_mail_address == None: + if primary_mail_address is None: return entry_modifications i = 1 @@ -991,41 +964,43 @@ # Length of results should be 0 (no entry found) # or 1 (which should be the entry we're looking at here) - if len(results) == 0: + if not results: log.debug( - _("No results for mail address %s found") % ( - _primary_mail - ), - level=8 - ) + _l("No results for mail address %s found") % ( + _primary_mail + ), + level=8 + ) done = True continue if len(results) == 1: log.debug( - _("1 result for address %s found, verifying") % ( - _primary_mail - ), - level=8 - ) + _l("1 result for address %s found, verifying") % ( + _primary_mail + ), + level=8 + ) almost_done = True for result in results: if not result == entry_dn: log.debug( - _("Too bad, primary email address %s " + \ - "already in use for %s (we are %s)") % ( - _primary_mail, - result, - entry_dn - ), - level=8 - ) + _l( + "Too bad, primary email address %s " + + "already in use for %s (we are %s)" + ) % ( + _primary_mail, + result, + entry_dn + ), + level=8 + ) almost_done = False else: - log.debug(_("Address assigned to us"), level=8) + log.debug(_l("Address assigned to us"), level=8) if almost_done: done = True @@ -1033,38 +1008,43 @@ i += 1 _primary_mail = "%s%d@%s" % ( - primary_mail_address.split('@')[0], - i, - primary_mail_address.split('@')[1] - ) + primary_mail_address.split('@')[0], + i, + primary_mail_address.split('@')[1] + ) primary_mail_address = _primary_mail ### - ### FIXME + # FIXME ### - if not primary_mail_address == None: - if not entry.has_key(primary_mail_attribute): + if primary_mail_address is not None: + if primary_mail_attribute not in entry: self.set_entry_attribute(entry, primary_mail_attribute, primary_mail_address) entry_modifications[primary_mail_attribute] = primary_mail_address else: if not primary_mail_address == entry[primary_mail_attribute]: - self.set_entry_attribute(entry, primary_mail_attribute, primary_mail_address) + self.set_entry_attribute( + entry, + primary_mail_attribute, + primary_mail_address + ) entry_modifications[primary_mail_attribute] = primary_mail_address - if not secondary_mail == None: + # pylint: disable=too-many-nested-blocks + if secondary_mail is not None: # Execute the plugin hook suggested_secondary_mail = conf.plugins.exec_hook( - "set_secondary_mail", - kw={ - 'secondary_mail': secondary_mail, - 'entry': entry, - 'domain': self.domain, - 'primary_domain': 
self.domain, - 'secondary_domains': self.list_secondary_domains() - } - ) # end of conf.plugins.exec_hook() call + "set_secondary_mail", + kw={ + 'secondary_mail': secondary_mail, + 'entry': entry, + 'domain': self.domain, + 'primary_domain': self.domain, + 'secondary_domains': self.list_secondary_domains() + } + ) # end of conf.plugins.exec_hook() call secondary_mail_addresses = [] @@ -1078,43 +1058,44 @@ # Length of results should be 0 (no entry found) # or 1 (which should be the entry we're looking at here) - if len(results) == 0: + if not results: log.debug( - _("No results for address %s found") % ( - __secondary_mail - ), - level=8 - ) + _l("No results for address %s found") % ( + __secondary_mail + ), + level=8 + ) done = True continue if len(results) == 1: log.debug( - _("1 result for address %s found, " + \ - "verifying...") % ( - __secondary_mail - ), - level=8 - ) + _l("1 result for address %s found, verifying...") % ( + __secondary_mail + ), + level=8 + ) almost_done = True for result in results: if not result == entry_dn: log.debug( - _("Too bad, secondary email " + \ - "address %s already in use for " + \ - "%s (we are %s)") % ( - __secondary_mail, - result, - entry_dn - ), - level=8 - ) + _l( + "Too bad, secondary email " + + "address %s already in use for " + + "%s (we are %s)" + ) % ( + __secondary_mail, + result, + entry_dn + ), + level=8 + ) almost_done = False else: - log.debug(_("Address assigned to us"), level=8) + log.debug(_l("Address assigned to us"), level=8) if almost_done: done = True @@ -1122,90 +1103,120 @@ i += 1 __secondary_mail = "%s%d@%s" % ( - _secondary_mail.split('@')[0], - i, - _secondary_mail.split('@')[1] - ) + _secondary_mail.split('@')[0], + i, + _secondary_mail.split('@')[1] + ) secondary_mail_addresses.append(__secondary_mail) - log.debug(_("Recipient policy composed the following set of secondary " + \ - "email addresses: %r") % (secondary_mail_addresses), level=8) + log.debug( + _l( + "Recipient policy composed the following set of secondary email addresses: %r" + ) % ( + secondary_mail_addresses + ), + level=8 + ) - if entry.has_key(secondary_mail_attribute): + if secondary_mail_attribute in entry: if isinstance(entry[secondary_mail_attribute], list): secondary_mail_addresses.extend(entry[secondary_mail_attribute]) else: secondary_mail_addresses.append(entry[secondary_mail_attribute]) - if not secondary_mail_addresses == None: + if secondary_mail_addresses is not None: log.debug( - _("Secondary mail addresses that we want is not None: %r") % ( - secondary_mail_addresses - ), - level=8 - ) + _l("Secondary mail addresses that we want is not None: %r") % ( + secondary_mail_addresses + ), + level=8 + ) secondary_mail_addresses = list(set(secondary_mail_addresses)) # Avoid duplicates while primary_mail_address in secondary_mail_addresses: log.debug( - _("Avoiding the duplication of the primary mail " + \ - "address %r in the list of secondary mail " + \ - "addresses") % (primary_mail_address), - level=8 - ) + _l( + "Avoiding the duplication of the primary mail " + + "address %r in the list of secondary mail " + + "addresses" + ) % (primary_mail_address), + level=8 + ) secondary_mail_addresses.pop( - secondary_mail_addresses.index(primary_mail_address) - ) + secondary_mail_addresses.index(primary_mail_address) + ) log.debug( - _("Entry is getting secondary mail addresses: %r") % ( - secondary_mail_addresses - ), - level=8 - ) + _l("Entry is getting secondary mail addresses: %r") % ( + secondary_mail_addresses + ), + level=8 + ) - if not 
entry.has_key(secondary_mail_attribute): + if secondary_mail_attribute not in entry: log.debug( - _("Entry did not have any secondary mail " + \ - "addresses in %r") % (secondary_mail_attribute), - level=8 - ) + _l("Entry did not have any secondary mail addresses in %r") % ( + secondary_mail_attribute + ), + level=8 + ) - if not len(secondary_mail_addresses) == 0: + if secondary_mail_addresses: self.set_entry_attribute( - entry, - secondary_mail_attribute, - secondary_mail_addresses - ) + entry, + secondary_mail_attribute, + secondary_mail_addresses + ) entry_modifications[secondary_mail_attribute] = secondary_mail_addresses else: - if isinstance(entry[secondary_mail_attribute], basestring): + if isinstance(entry[secondary_mail_attribute], string_types): entry[secondary_mail_attribute] = [entry[secondary_mail_attribute]] - log.debug(_("secondary_mail_addresses: %r") % (secondary_mail_addresses), level=8) - log.debug(_("entry[%s]: %r") % (secondary_mail_attribute,entry[secondary_mail_attribute]), level=8) + log.debug( + _l("secondary_mail_addresses: %r") % (secondary_mail_addresses), + level=8 + ) + + log.debug( + _l("entry[%s]: %r") % ( + secondary_mail_attribute, + entry[secondary_mail_attribute] + ), + level=8 + ) secondary_mail_addresses.sort() entry[secondary_mail_attribute].sort() - log.debug(_("secondary_mail_addresses: %r") % (secondary_mail_addresses), level=8) - log.debug(_("entry[%s]: %r") % (secondary_mail_attribute,entry[secondary_mail_attribute]), level=8) + log.debug( + _l("secondary_mail_addresses: %r") % (secondary_mail_addresses), + level=8 + ) + + log.debug( + _l("entry[%s]: %r") % ( + secondary_mail_attribute, + entry[secondary_mail_attribute] + ), + level=8 + ) - if not list(set(secondary_mail_addresses)) == list(set(entry[secondary_mail_attribute])): + smas = list(set(secondary_mail_addresses)) + if smas != list(set(entry[secondary_mail_attribute])): self.set_entry_attribute( - entry, - secondary_mail_attribute, - list(set(secondary_mail_addresses)) - ) + entry, + secondary_mail_attribute, + smas + ) - entry_modifications[secondary_mail_attribute] = list(set(secondary_mail_addresses)) + entry_modifications[secondary_mail_attribute] = smas - log.debug(_("Entry modifications list: %r") % (entry_modifications), level=8) + log.debug(_l("Entry modifications list: %r") % (entry_modifications), level=8) return entry_modifications @@ -1224,19 +1235,19 @@ config_base_dn = self.config_get('base_dn') ldap_base_dn = self._kolab_domain_root_dn(self.domain) - if not ldap_base_dn == None and not ldap_base_dn == config_base_dn: + if ldap_base_dn is not None and not ldap_base_dn == config_base_dn: base_dn = ldap_base_dn else: base_dn = config_base_dn _results = self._search( - base_dn, - filterstr=_filter, - attrlist=[ - '*', - ], - override_search='_regular_search' - ) + base_dn, + filterstr=_filter, + attrlist=[ + '*', + ], + override_search='_regular_search' + ) # Remove referrals _entry_dns = [_e for _e in _results if _e[0] is not None] @@ -1244,8 +1255,12 @@ return _entry_dns def set_entry_attribute(self, entry_id, attribute, value): - log.debug(_("Setting entry attribute %r to %r for %r") % (attribute, value, entry_id), level=8) - self.set_entry_attributes(entry_id, { attribute: value }) + log.debug( + _l("Setting entry attribute %r to %r for %r") % (attribute, value, entry_id), + level=8 + ) + + self.set_entry_attributes(entry_id, {attribute: value}) def set_entry_attributes(self, entry_id, attributes): self._bind() @@ -1261,28 +1276,33 @@ modlist = [] - for attribute in 
attrs.keys(): - if not entry.has_key(attribute): + for attribute, value in attrs.items(): + if attribute not in entry: entry[attribute] = self.get_entry_attribute(entry_id, attribute) - for attribute in attrs.keys(): - if entry.has_key(attribute) and entry[attribute] == None: - modlist.append((ldap.MOD_ADD, attribute, attrs[attribute])) - elif entry.has_key(attribute) and not entry[attribute] == None: - if attrs[attribute] == None: + if attribute in entry and entry[attribute] is None: + modlist.append((ldap.MOD_ADD, attribute, value)) + elif attribute in entry and entry[attribute] is not None: + if value is None: modlist.append((ldap.MOD_DELETE, attribute, entry[attribute])) else: - modlist.append((ldap.MOD_REPLACE, attribute, attrs[attribute])) + modlist.append((ldap.MOD_REPLACE, attribute, value)) dn = entry_dn - if len(modlist) > 0 and self._bind_priv() is True: + if modlist and self._bind_priv() is True: try: self.ldap_priv.modify_s(dn, modlist) - except Exception, errmsg: - log.error(_("Could not update dn:\nDN: %r\nModlist: %r\nError Message: %r") % (dn, modlist, errmsg)) - import traceback - log.error("%s" % (traceback.format_exc())) + except Exception as errmsg: + log.error( + _l("Could not update dn:\nDN: %r\nModlist: %r\nError Message: %r") % ( + dn, + modlist, + errmsg + ) + ) + + log.error(traceback.format_exc()) def synchronize(self, mode=0, callback=None): """ @@ -1304,7 +1324,7 @@ default="%Y%m%d%H%M%SZ" ).replace('%%', '%') - modified_after = datetime.datetime(1900, 01, 01, 00, 00, 00).strftime( + modified_after = datetime.datetime(1900, 1, 1, 00, 00, 00).strftime( modifytimestamp_format ) @@ -1313,9 +1333,9 @@ _filter = "(&%s(modifytimestamp>=%s))" % (_filter, modified_after) - log.debug(_("Synchronization is using filter %r") % (_filter), level=8) + log.debug(_l("Synchronization is using filter %r") % (_filter), level=8) - if not mode == 0: + if mode != 0: override_search = mode else: override_search = False @@ -1328,37 +1348,37 @@ else: base_dn = config_base_dn - log.debug(_("Synchronization is searching against base DN: %s") % (base_dn), level=8) + log.debug(_l("Synchronization is searching against base DN: %s") % (base_dn), level=8) - if callback == None: + if callback is None: callback = self._synchronize_callback try: self._search( - base_dn, - filterstr=_filter, - attrlist=[ - '*', - self.config_get('unique_attribute'), - conf.get('cyrus-sasl', 'result_attribute'), - 'modifytimestamp' - ], - override_search=override_search, - callback=callback, - ) - except Exception, errmsg: + base_dn, + filterstr=_filter, + attrlist=[ + '*', + self.config_get('unique_attribute'), + conf.get('cyrus-sasl', 'result_attribute'), + 'modifytimestamp' + ], + override_search=override_search, + callback=callback, + ) + except Exception as errmsg: log.error("An error occurred: %r" % (errmsg)) - log.error(_("%s") % (traceback.format_exc())) + log.error(_l("%s") % (traceback.format_exc())) def user_quota(self, entry_id, folder): default_quota = self.config_get('default_quota') quota_attribute = self.config_get('quota_attribute') - if quota_attribute == None: + if quota_attribute is None: return # The default quota may be None, but LDAP quota could still be set - if default_quota == None: + if default_quota is None: default_quota = 0 self._bind() @@ -1368,66 +1388,70 @@ current_ldap_quota = self.get_entry_attribute(entry_dn, quota_attribute) _imap_quota = self.imap.get_quota(folder) - if _imap_quota == None: + if _imap_quota is None: used = None current_imap_quota = None else: (used, 
current_imap_quota) = _imap_quota log.debug( - _("About to consider the user quota for %r (used: %r, " + \ - "imap: %r, ldap: %r, default: %r") % ( - entry_dn, - used, - current_imap_quota, - current_ldap_quota, - default_quota - ), - level=8 - ) + _l( + "About to consider the user quota for %r (used: %r, " + + "imap: %r, ldap: %r, default: %r" + ) % ( + entry_dn, + used, + current_imap_quota, + current_ldap_quota, + default_quota + ), + level=8 + ) - new_quota = conf.plugins.exec_hook("set_user_folder_quota", kw={ - 'used': used, - 'imap_quota': current_imap_quota, - 'ldap_quota': current_ldap_quota, - 'default_quota': default_quota - } - ) + new_quota = conf.plugins.exec_hook( + "set_user_folder_quota", + kw={ + 'used': used, + 'imap_quota': current_imap_quota, + 'ldap_quota': current_ldap_quota, + 'default_quota': default_quota + } + ) try: current_ldap_quota = (int)(current_ldap_quota) - except: + except Exception: current_ldap_quota = None # If the new quota is zero, get out if new_quota == 0: return - if not current_ldap_quota == None: + if current_ldap_quota is not None: if not new_quota == (int)(current_ldap_quota): self.set_entry_attribute( - entry_dn, - quota_attribute, - "%s" % (new_quota) - ) + entry_dn, + quota_attribute, + "%s" % (new_quota) + ) else: - if not new_quota == None: + if new_quota is not None: self.set_entry_attribute( - entry_dn, - quota_attribute, - "%s" % (new_quota) - ) + entry_dn, + quota_attribute, + "%s" % (new_quota) + ) - if not current_imap_quota == None: + if current_imap_quota is not None: if not new_quota == current_imap_quota: self.imap.set_quota(folder, new_quota) else: - if not new_quota == None: + if new_quota is not None: self.imap.set_quota(folder, new_quota) ### - ### API depth level increasing! + # API depth level increasing! ### def _bind(self, bind_dn=None, bind_pw=None): @@ -1438,12 +1462,12 @@ # If the bind_dn is None and the bind_pw is not... fail if bind_dn is None and bind_pw is not None: - log.error(_("Attempting to bind without a DN but with a password")) + log.error(_l("Attempting to bind without a DN but with a password")) return False # and the same vice-versa - if bind_dn is not None and bind_pw is None: - log.error(_("Attempting to bind with a DN but without a password")) + if bind_dn is None and bind_pw is not None: + log.error(_l("Attempting to bind with a DN but without a password")) return False # If we are to bind as foo, we have no state. 
@@ -1462,7 +1486,7 @@ if bind_dn is not None: log.debug( - _("Binding with bind_dn: %s and password: %s") % ( + _l("Binding with bind_dn: %s and password: %s") % ( bind_dn, '*' * len(bind_pw) ), @@ -1477,15 +1501,15 @@ return True - except ldap.SERVER_DOWN, errmsg: - log.error(_("LDAP server unavailable: %r") % (errmsg)) - log.error(_("%s") % (traceback.format_exc())) + except ldap.SERVER_DOWN as errmsg: + log.error(_l("LDAP server unavailable: %r") % (errmsg)) + log.error(_l("%s") % (traceback.format_exc())) return False except ldap.NO_SUCH_OBJECT: log.error( - _("Invalid DN, username and/or password for '%s'.") % ( + _l("Invalid DN, username and/or password for '%s'.") % ( bind_dn ) ) @@ -1494,7 +1518,7 @@ except ldap.INVALID_CREDENTIALS: log.error( - _("Invalid DN, username and/or password for '%s'.") % ( + _l("Invalid DN, username and/or password for '%s'.") % ( bind_dn ) ) @@ -1502,7 +1526,7 @@ return False else: - log.debug(_("bind() called but already bound"), level=8) + log.debug(_l("bind() called but already bound"), level=8) return True @@ -1516,20 +1540,20 @@ try: self.ldap_priv.simple_bind_s(bind_dn, bind_pw) return True - except ldap.SERVER_DOWN, errmsg: - log.error(_("LDAP server unavailable: %r") % (errmsg)) - log.error(_("%s") % (traceback.format_exc())) + except ldap.SERVER_DOWN as errmsg: + log.error(_l("LDAP server unavailable: %r") % (errmsg)) + log.error(_l("%s") % (traceback.format_exc())) return False except ldap.INVALID_CREDENTIALS: log.error( - _("Invalid DN, username and/or password for '%s'.") % ( + _l("Invalid DN, username and/or password for '%s'.") % ( bind_dn ) ) return False else: - log.debug(_("bind_priv() called but already bound"), level=8) + log.debug(_l("bind_priv() called but already bound"), level=8) return True def _change_add_group(self, entry, change): @@ -1575,50 +1599,48 @@ # Get some configuration values mailserver_attribute = self.config_get('mailserver_attribute') - if entry.has_key(mailserver_attribute): + if mailserver_attribute in entry: server = entry[mailserver_attribute] foldertype_attribute = self.config_get('sharedfolder_type_attribute') - if not foldertype_attribute == None: - if not entry.has_key(foldertype_attribute): + if foldertype_attribute is not None: + if foldertype_attribute not in entry: entry[foldertype_attribute] = self.get_entry_attribute( - entry['id'], - foldertype_attribute - ) + entry['id'], + foldertype_attribute + ) - if not entry[foldertype_attribute] == None: + if entry[foldertype_attribute] is not None: entry['kolabfoldertype'] = entry[foldertype_attribute] - if not entry.has_key('kolabfoldertype'): + if 'kolabfoldertype' not in entry: entry['kolabfoldertype'] = self.get_entry_attribute( - entry['id'], - 'kolabfoldertype' - ) + entry['id'], + 'kolabfoldertype' + ) # A delivery address is postuser+targetfolder delivery_address_attribute = self.config_get('sharedfolder_delivery_address_attribute') - if delivery_address_attribute == None: + if delivery_address_attribute is None: delivery_address_attribute = 'mail' - if not entry.has_key(delivery_address_attribute): + if delivery_address_attribute not in entry: entry[delivery_address_attribute] = self.get_entry_attribute( - entry['id'], - delivery_address_attribute - ) + entry['id'], + delivery_address_attribute + ) - if not entry[delivery_address_attribute] == None: + if entry[delivery_address_attribute] is not None: if len(entry[delivery_address_attribute].split('+')) > 1: entry['kolabtargetfolder'] = entry[delivery_address_attribute].split('+')[1] - if not 
entry.has_key('kolabtargetfolder'): + if 'kolabtargetfolder' not in entry: entry['kolabtargetfolder'] = self.get_entry_attribute( - entry['id'], - 'kolabtargetfolder' - ) - - if entry.has_key('kolabtargetfolder') and \ - not entry['kolabtargetfolder'] == None: + entry['id'], + 'kolabtargetfolder' + ) + if 'kolabtargetfolder' in entry and entry['kolabtargetfolder'] is not None: folder_path = entry['kolabtargetfolder'] else: # TODO: What is *the* way to see if we need to create an @domain @@ -1636,79 +1658,79 @@ folder_path = "shared/%s" % folder_path folderacl_entry_attribute = self.config_get('sharedfolder_acl_entry_attribute') - if folderacl_entry_attribute == None: + if folderacl_entry_attribute is None: folderacl_entry_attribute = 'acl' - if not entry.has_key(folderacl_entry_attribute): + if folderacl_entry_attribute not in entry: entry[folderacl_entry_attribute] = self.get_entry_attribute( - entry['id'], - folderacl_entry_attribute - ) + entry['id'], + folderacl_entry_attribute + ) if not self.imap.shared_folder_exists(folder_path): self.imap.shared_folder_create(folder_path, server) - if entry.has_key('kolabfoldertype') and \ - not entry['kolabfoldertype'] == None: + if 'kolabfoldertype' in entry and entry['kolabfoldertype'] is not None: - self.imap.shared_folder_set_type( - folder_path, - entry['kolabfoldertype'] - ) + self.imap.shared_folder_set_type(folder_path, entry['kolabfoldertype']) entry['kolabfolderaclentry'] = self._parse_acl(entry[folderacl_entry_attribute]) - self.imap._set_kolab_mailfolder_acls( - entry['kolabfolderaclentry'], folder_path - ) + # pylint: disable=protected-access + self.imap._set_kolab_mailfolder_acls(entry['kolabfolderaclentry'], folder_path) - if entry.has_key(delivery_address_attribute) and \ - not entry[delivery_address_attribute] == None: - self.imap.set_acl(folder_path, 'anyone', '+p') + if delivery_address_attribute in entry: + if entry[delivery_address_attribute] is not None: + self.imap.set_acl(folder_path, 'anyone', '+p') - #if server == None: - #self.entry_set_attribute(mailserver_attribute, server) + # if server is None: + # self.entry_set_attribute(mailserver_attribute, server) def _change_add_unknown(self, entry, change): """ An entry has been add, and we do not know of what object type the entry was - user, group, role or sharedfolder. """ + success = None + result_attribute = conf.get('cyrus-sasl', 'result_attribute') - if not entry.has_key(result_attribute): + if result_attribute not in entry: return None - if entry[result_attribute] == None: + if entry[result_attribute] is None: return None - for _type in ['user','group','role','sharedfolder']: + for _type in ['user', 'group', 'role', 'sharedfolder']: try: - eval("self._change_add_%s(entry, change)" % (_type)) + func = getattr(self, '_change_add_%s' % (_type)) + func(entry, change) success = True - except: + except Exception: success = False if success: break + return success + def _change_add_user(self, entry, change): """ An entry of type user was added. 
""" mailserver_attribute = self.config_get('mailserver_attribute') - if mailserver_attribute == None: + if mailserver_attribute is None: mailserver_attribute = 'mailhost' mailserver_attribute = mailserver_attribute.lower() result_attribute = conf.get('cyrus-sasl', 'result_attribute') - if result_attribute == None: + if result_attribute is None: result_attribute = 'mail' result_attribute = result_attribute.lower() - if not entry.has_key(mailserver_attribute): + if mailserver_attribute not in entry: entry[mailserver_attribute] = \ self.get_entry_attribute(entry, mailserver_attribute) @@ -1716,10 +1738,10 @@ for key in rcpt_addrs: entry[key] = rcpt_addrs[key] - if not entry.has_key(result_attribute): + if result_attribute not in entry: return - if entry[result_attribute] == None: + if entry[result_attribute] is None: return if entry[result_attribute] == '': @@ -1731,30 +1753,30 @@ if not self.imap.user_mailbox_exists(entry[result_attribute].lower()): folder = self.imap.user_mailbox_create( - entry[result_attribute], - entry[mailserver_attribute] - ) + entry[result_attribute], + entry[mailserver_attribute] + ) else: - folder = "user%s%s" % (self.imap.get_separator(),entry[result_attribute].lower()) + folder = "user%s%s" % (self.imap.get_separator(), entry[result_attribute].lower()) server = self.imap.user_mailbox_server(folder) log.debug( - _("Entry %s attribute value: %r") % ( - mailserver_attribute, - entry[mailserver_attribute] - ), - level=8 - ) + _l("Entry %s attribute value: %r") % ( + mailserver_attribute, + entry[mailserver_attribute] + ), + level=8 + ) log.debug( - _("imap.user_mailbox_server(%r) result: %r") % ( - folder, - server - ), - level=8 - ) + _l("imap.user_mailbox_server(%r) result: %r") % ( + folder, + server + ), + level=8 + ) if not entry[mailserver_attribute] == server: self.set_entry_attribute(entry, mailserver_attribute, server) @@ -1768,14 +1790,13 @@ result_attribute = conf.get('cyrus-sasl', 'result_attribute') - if not entry.has_key(result_attribute): + if result_attribute not in entry: return None - if entry[result_attribute] == None: + if entry[result_attribute] is None: return None - self.imap.cleanup_acls(entry[result_attribute]) - + return self.imap.cleanup_acls(entry[result_attribute]) def _change_delete_None(self, entry, change): """ @@ -1799,35 +1820,38 @@ """ result_attribute = conf.get('cyrus-sasl', 'result_attribute') - if not entry.has_key(result_attribute): + if result_attribute not in entry: return None - if entry[result_attribute] == None: + if entry[result_attribute] is None: return None success = True - for _type in ['user','group','resource','role','sharedfolder']: + for _type in ['user', 'group', 'resource', 'role', 'sharedfolder']: try: - success = eval("self._change_delete_%s(entry, change)" % (_type)) - except Exception, errmsg: - log.error(_("An error occured: %r") % (errmsg)) - log.error(_("%s") % (traceback.format_exc())) + func = getattr(self, '_change_delete_%s' % (_type)) + success = func(entry, change) + except Exception as errmsg: + log.error(_l("An error occured: %r") % (errmsg)) + log.error(_l("%s") % (traceback.format_exc())) success = False if success: break + return success + def _change_delete_user(self, entry, change): """ An entry of type user was deleted. 
""" result_attribute = conf.get('cyrus-sasl', 'result_attribute') - if not entry.has_key(result_attribute): + if result_attribute not in entry: return None - if entry[result_attribute] == None: + if entry[result_attribute] is None: return None cache.delete_entry(self.domain, entry) @@ -1838,12 +1862,14 @@ # let plugins act upon this deletion conf.plugins.exec_hook( 'user_delete', - kw = { + kw={ 'user': entry, 'domain': self.domain } ) + return True + def _change_moddn_group(self, entry, change): # TODO: If the rdn attribute is the same as the result attribute... pass @@ -1855,16 +1881,15 @@ old_dn = change['previous_dn'] new_dn = change['dn'] - import ldap.dn - old_rdn = ldap.dn.explode_dn(old_dn)[0].split('=')[0] - new_rdn = ldap.dn.explode_dn(new_dn)[0].split('=')[0] + old_rdn = explode_dn(old_dn)[0].split('=')[0] + new_rdn = explode_dn(new_dn)[0].split('=')[0] result_attribute = conf.get('cyrus-sasl', 'result_attribute') old_canon_attr = None cache_entry = cache.get_entry(self.domain, entry) - if not cache_entry == None: + if cache_entry is not None: old_canon_attr = cache_entry.result_attribute # See if we have to trigger the recipient policy. Only really applies to @@ -1887,13 +1912,13 @@ if trigger_recipient_policy: entry_changes = self.recipient_policy(entry) - for key in entry_changes.keys(): - entry[key] = entry_changes[key] + for key, value in entry_changes.items(): + entry[key] = value - if not entry.has_key(result_attribute): + if result_attribute not in entry: return - if entry[result_attribute] == None: + if entry[result_attribute] is None: return if entry[result_attribute] == '': @@ -1901,17 +1926,12 @@ # Now look at entry_changes and old_canon_attr, and see if they're # the same value. - if entry_changes.has_key(result_attribute): - if not old_canon_attr == None: - self.imap.user_mailbox_create( - entry_changes[result_attribute] - ) + if result_attribute in entry_changes: + if old_canon_attr is not None: + self.imap.user_mailbox_create(entry_changes[result_attribute]) elif not entry_changes[result_attribute] == old_canon_attr: - self.imap.user_mailbox_rename( - old_canon_attr, - entry_changes[result_attribute] - ) + self.imap.user_mailbox_rename(old_canon_attr, entry_changes[result_attribute]) cache.get_entry(self.domain, entry) @@ -1937,48 +1957,46 @@ # Get some configuration values mailserver_attribute = self.config_get('mailserver_attribute') - if entry.has_key(mailserver_attribute): + if mailserver_attribute in entry: server = entry[mailserver_attribute] foldertype_attribute = self.config_get('sharedfolder_type_attribute') - if not foldertype_attribute == None: - if not entry.has_key(foldertype_attribute): + if foldertype_attribute is not None: + if foldertype_attribute not in entry: entry[foldertype_attribute] = self.get_entry_attribute( - entry['id'], - foldertype_attribute - ) + entry['id'], + foldertype_attribute + ) - if not entry[foldertype_attribute] == None: + if entry[foldertype_attribute] is not None: entry['kolabfoldertype'] = entry[foldertype_attribute] - if not entry.has_key('kolabfoldertype'): + if 'kolabfoldertype' not in entry: entry['kolabfoldertype'] = self.get_entry_attribute( - entry['id'], - 'kolabfoldertype' - ) + entry['id'], + 'kolabfoldertype' + ) # A delivery address is postuser+targetfolder delivery_address_attribute = self.config_get('sharedfolder_delivery_address_attribute') - if not delivery_address_attribute == None: - if not entry.has_key(delivery_address_attribute): + if delivery_address_attribute is not None: + if 
delivery_address_attribute not in entry: entry[delivery_address_attribute] = self.get_entry_attribute( - entry['id'], - delivery_address_attribute - ) + entry['id'], + delivery_address_attribute + ) - if not entry[delivery_address_attribute] == None: + if entry[delivery_address_attribute] is not None: if len(entry[delivery_address_attribute].split('+')) > 1: entry['kolabtargetfolder'] = entry[delivery_address_attribute].split('+')[1] - if not entry.has_key('kolabtargetfolder'): + if 'kolabtargetfolder' not in entry: entry['kolabtargetfolder'] = self.get_entry_attribute( - entry['id'], - 'kolabtargetfolder' - ) - - if entry.has_key('kolabtargetfolder') and \ - not entry['kolabtargetfolder'] == None: + entry['id'], + 'kolabtargetfolder' + ) + if 'kolabtargetfolder' in entry and entry['kolabtargetfolder'] is not None: folder_path = entry['kolabtargetfolder'] else: # TODO: What is *the* way to see if we need to create an @domain @@ -1996,34 +2014,30 @@ folder_path = "shared/%s" % folder_path folderacl_entry_attribute = self.config_get('sharedfolder_acl_entry_attribute') - if folderacl_entry_attribute == None: + if folderacl_entry_attribute is None: folderacl_entry_attribute = 'acl' - if not entry.has_key(folderacl_entry_attribute): + if folderacl_entry_attribute not in entry: entry[folderacl_entry_attribute] = self.get_entry_attribute( - entry['id'], - folderacl_entry_attribute - ) + entry['id'], + folderacl_entry_attribute + ) if not self.imap.shared_folder_exists(folder_path): self.imap.shared_folder_create(folder_path, server) - if entry.has_key('kolabfoldertype') and \ - not entry['kolabfoldertype'] == None: - + if 'kolabfoldertype' in entry and entry['kolabfoldertype'] is not None: self.imap.shared_folder_set_type( - folder_path, - entry['kolabfoldertype'] - ) + folder_path, + entry['kolabfoldertype'] + ) entry['kolabfolderaclentry'] = self._parse_acl(entry[folderacl_entry_attribute]) - self.imap._set_kolab_mailfolder_acls( - entry['kolabfolderaclentry'], folder_path, True - ) + # pylint: disable=protected-access + self.imap._set_kolab_mailfolder_acls(entry['kolabfolderaclentry'], folder_path, True) - if entry.has_key(delivery_address_attribute) and \ - not entry[delivery_address_attribute] == None: + if delivery_address_attribute in entry and entry[delivery_address_attribute] is not None: self.imap.set_acl(folder_path, 'anyone', '+p') def _change_modify_user(self, entry, change): @@ -2036,74 +2050,74 @@ # Initialize old_canon_attr (#1701) old_canon_attr = None - result_attribute = conf.get('cyrus-sasl','result_attribute') + result_attribute = conf.get('cyrus-sasl', 'result_attribute') _entry = cache.get_entry(self.domain, entry, update=False) # We do not necessarily have a synchronisation cache entry (#1701) - if not _entry == None: - if _entry.__dict__.has_key('result_attribute') and not _entry.result_attribute == '': + if _entry is not None: + if 'result_attribute' in _entry.__dict__ and not _entry.result_attribute == '': old_canon_attr = _entry.result_attribute entry_changes = self.recipient_policy(entry) log.debug( - _("Result from recipient policy: %r") % (entry_changes), - level=8 - ) + _l("Result from recipient policy: %r") % (entry_changes), + level=8 + ) - if entry_changes.has_key(result_attribute): + if result_attribute in entry_changes: if not entry_changes[result_attribute] == old_canon_attr: - if old_canon_attr == None: + if old_canon_attr is None: self.imap.user_mailbox_create( - entry_changes[result_attribute] - ) + entry_changes[result_attribute] + ) else: 
self.imap.user_mailbox_rename( - old_canon_attr, - entry_changes[result_attribute] - ) + old_canon_attr, + entry_changes[result_attribute] + ) entry[result_attribute] = entry_changes[result_attribute] cache.get_entry(self.domain, entry) - elif entry.has_key(result_attribute): + elif result_attribute in entry: if not entry[result_attribute] == old_canon_attr: - if old_canon_attr == None: + if old_canon_attr is None: self.imap.user_mailbox_create( - entry[result_attribute] - ) + entry[result_attribute] + ) else: self.imap.user_mailbox_rename( - old_canon_attr, - entry[result_attribute] - ) + old_canon_attr, + entry[result_attribute] + ) cache.get_entry(self.domain, entry) else: if not self.imap.user_mailbox_exists(entry[result_attribute]): self.imap.user_mailbox_create( - entry[result_attribute] - ) - - self.user_quota( - entry, - "user%s%s" % ( - self.imap.get_separator(), entry[result_attribute] ) + + self.user_quota( + entry, + "user%s%s" % ( + self.imap.get_separator(), + entry[result_attribute] ) + ) if conf.has_option(self.domain, 'sieve_mgmt'): sieve_mgmt_enabled = conf.get(self.domain, 'sieve_mgmt') if utils.true_or_false(sieve_mgmt_enabled): conf.plugins.exec_hook( - 'sieve_mgmt_refresh', - kw={ - 'user': entry[result_attribute] - } - ) + 'sieve_mgmt_refresh', + kw={ + 'user': entry[result_attribute] + } + ) def _change_none_group(self, entry, change): """ @@ -2135,36 +2149,35 @@ server = None mailserver_attribute = self.config_get('mailserver_attribute') - if entry.has_key(mailserver_attribute): + if mailserver_attribute in entry: server = entry[mailserver_attribute] - if not entry.has_key('kolabtargetfolder'): + if 'kolabtargetfolder' not in entry: entry['kolabtargetfolder'] = self.get_entry_attribute( - entry['id'], - 'kolabtargetfolder' - ) + entry['id'], + 'kolabtargetfolder' + ) - if not entry.has_key('kolabfoldertype'): + if 'kolabfoldertype' not in entry: entry['kolabfoldertype'] = self.get_entry_attribute( - entry['id'], - 'kolabfoldertype' - ) + entry['id'], + 'kolabfoldertype' + ) folderacl_entry_attribute = conf.get('ldap', 'sharedfolder_acl_entry_attribute') - if folderacl_entry_attribute == None: + if folderacl_entry_attribute is None: folderacl_entry_attribute = 'acl' - if not entry.has_key(folderacl_entry_attribute): + if folderacl_entry_attribute not in entry: entry['kolabfolderaclentry'] = self.get_entry_attribute( - entry['id'], - folderacl_entry_attribute - ) + entry['id'], + folderacl_entry_attribute + ) else: entry['kolabfolderaclentry'] = entry[folderacl_entry_attribute] del entry[folderacl_entry_attribute] - if entry.has_key('kolabtargetfolder') and \ - not entry['kolabtargetfolder'] == None: + if 'kolabtargetfolder' in entry and entry['kolabtargetfolder'] is not None: folder_path = entry['kolabtargetfolder'] else: @@ -2185,40 +2198,39 @@ if not self.imap.shared_folder_exists(folder_path): self.imap.shared_folder_create(folder_path, server) - if entry.has_key('kolabfoldertype') and \ - not entry['kolabfoldertype'] == None: + if 'kolabfoldertype' in entry and entry['kolabfoldertype'] is not None: self.imap.shared_folder_set_type( - folder_path, - entry['kolabfoldertype'] - ) + folder_path, + entry['kolabfoldertype'] + ) entry['kolabfolderaclentry'] = self._parse_acl(entry['kolabfolderaclentry']) self.imap._set_kolab_mailfolder_acls( - entry['kolabfolderaclentry'], folder_path, True - ) + entry['kolabfolderaclentry'], folder_path, True + ) delivery_address_attribute = self.config_get('sharedfolder_delivery_address_attribute') - if 
entry.has_key(delivery_address_attribute) and \ - not entry[delivery_address_attribute] == None: + if delivery_address_attribute in entry and \ + entry[delivery_address_attribute] is not None: self.imap.set_acl(folder_path, 'anyone', '+p') - #if server == None: - #self.entry_set_attribute(mailserver_attribute, server) + # if server is None: + # self.entry_set_attribute(mailserver_attribute, server) def _change_none_user(self, entry, change): """ A user entry as part of the initial search result set. """ mailserver_attribute = self.config_get('mailserver_attribute') - if mailserver_attribute == None: + if mailserver_attribute is None: mailserver_attribute = 'mailhost' mailserver_attribute = mailserver_attribute.lower() result_attribute = conf.get('cyrus-sasl', 'result_attribute') - if result_attribute == None: + if result_attribute is None: result_attribute = 'mail' result_attribute = result_attribute.lower() @@ -2227,26 +2239,29 @@ _entry = cache.get_entry(self.domain, entry, update=False) - if not _entry == None and _entry.__dict__.has_key('result_attribute') and not _entry.result_attribute == '': + if _entry is not None and \ + 'result_attribute' in _entry.__dict__ and \ + not _entry.result_attribute == '': + old_canon_attr = _entry.result_attribute entry_changes = self.recipient_policy(entry) - if entry.has_key(result_attribute) and entry_changes.has_key(result_attribute): + if result_attribute in entry and result_attribute in entry_changes: if not entry[result_attribute] == entry_changes[result_attribute]: old_canon_attr = entry[result_attribute] log.debug( - _("Result from recipient policy: %r") % (entry_changes), - level=8 - ) + _l("Result from recipient policy: %r") % (entry_changes), + level=8 + ) - if entry_changes.has_key(result_attribute) and not old_canon_attr == None: + if result_attribute in entry_changes and old_canon_attr is not None: if not entry_changes[result_attribute] == old_canon_attr: self.imap.user_mailbox_rename( - old_canon_attr, - entry_changes[result_attribute] - ) + old_canon_attr, + entry_changes[result_attribute] + ) for key in entry_changes.keys(): entry[key] = entry_changes[key] @@ -2258,32 +2273,30 @@ server = None - if not entry.has_key(mailserver_attribute): + if mailserver_attribute not in entry: entry[mailserver_attribute] = self.get_entry_attribute(entry, mailserver_attribute) - if entry[mailserver_attribute] == "" or entry[mailserver_attribute] == None: + if entry[mailserver_attribute] == "" or entry[mailserver_attribute] is None: server = None else: server = entry[mailserver_attribute].lower() - if entry.has_key(result_attribute) and \ - not entry.has_key(result_attribute) == None: - + if result_attribute in entry and entry[result_attribute] is not None: if not self.imap.user_mailbox_exists(entry[result_attribute]): folder = self.imap.user_mailbox_create(entry[result_attribute], server=server) server = self.imap.user_mailbox_server(folder) else: folder = "user%s%s" % ( - self.imap.get_separator(), - entry[result_attribute] - ) + self.imap.get_separator(), + entry[result_attribute] + ) server = self.imap.user_mailbox_server(folder) self.user_quota(entry, folder) mailserver_attr = self.config_get('mailserver_attribute') - if not entry.has_key(mailserver_attr): + if mailserver_attr not in entry: self.set_entry_attribute(entry, mailserver_attr, server) else: if not entry[mailserver_attr] == server: @@ -2292,11 +2305,11 @@ else: log.warning( - _("Kolab user %s does not have a result attribute %r") % ( - entry['id'], - result_attribute - ) + _l("Kolab 
user %s does not have a result attribute %r") % ( + entry['id'], + result_attribute ) + ) def _disconnect(self): del self.ldap @@ -2314,22 +2327,22 @@ # Lower case of naming contexts - primarily for AD naming_contexts = utils.normalize(attrs['namingcontexts']) - if isinstance(naming_contexts, basestring): - naming_contexts = [ naming_contexts ] + if isinstance(naming_contexts, string_types): + naming_contexts = [naming_contexts] log.debug( - _("Naming contexts found: %r") % (naming_contexts), - level=8 - ) + _l("Naming contexts found: %r") % (naming_contexts), + level=8 + ) self._kolab_domain_root_dn(domain) log.debug( - _("Domains/Root DNs found: %r") % ( - self.domain_rootdns - ), - level=8 - ) + _l("Domains/Root DNs found: %r") % ( + self.domain_rootdns + ), + level=8 + ) # If we have a 1:1 match, continue as planned for naming_context in naming_contexts: @@ -2354,7 +2367,7 @@ Returns True or False """ if isinstance(value, dict): - if value.has_key('dn'): + if 'dn' in value: return True return False @@ -2367,12 +2380,11 @@ """ # Only basestrings can be DNs - if not isinstance(value, basestring): + if not isinstance(value, string_types): return False try: - import ldap.dn - ldap_dn = ldap.dn.explode_dn(value) + explode_dn(value) except ldap.DECODING_ERROR: # This is not a DN. return False @@ -2390,58 +2402,60 @@ config_base_dn = self.config_get('base_dn') ldap_base_dn = self._kolab_domain_root_dn(self.domain) - if not ldap_base_dn == None and not ldap_base_dn == config_base_dn: + if ldap_base_dn is not None and not ldap_base_dn == config_base_dn: base_dn = ldap_base_dn else: base_dn = config_base_dn for _type in ['user', 'group', 'sharedfolder']: __filter = self.config_get('kolab_%s_filter' % (_type)) - if __filter == None: + if __filter is None: __filter = self.config_get('%s_filter' % (_type)) - if not __filter == None: + if __filter is not None: try: result = self._regular_search(entry_dn, filterstr=__filter) - except: + except Exception: result = self._regular_search( - base_dn, - filterstr="(%s=%s)" %( - self.config_get('unique_attribute'), - entry_id['id']) - ) + base_dn, + filterstr="(%s=%s)" % ( + self.config_get('unique_attribute'), + entry_id['id'] + ) + ) if not result: continue else: return _type + return None + def _find_user_dn(self, login, kolabuser=False): """ Find the distinguished name (DN) for a (Kolab) user entry in LDAP. 
""" conf_prefix = 'kolab_' if kolabuser else '' - domain_root_dn = self._kolab_domain_root_dn(self.domain) user_base_dn = self.config_get(conf_prefix + 'user_base_dn') - if user_base_dn == None: + if user_base_dn is None: user_base_dn = self.config_get('base_dn') auth_attrs = self.config_get_list('auth_attributes') - auth_search_filter = [ '(|' ] + auth_search_filter = ['(|'] for auth_attr in auth_attrs: - auth_search_filter.append('(%s=%s)' % (auth_attr,login)) - if not '@' in login: + auth_search_filter.append('(%s=%s)' % (auth_attr, login)) + if '@' not in login: auth_search_filter.append( - '(%s=%s@%s)' % ( - auth_attr, - login, - self.domain - ) + '(%s=%s@%s)' % ( + auth_attr, + login, + self.domain ) + ) auth_search_filter.append(')') @@ -2450,16 +2464,16 @@ user_filter = self.config_get(conf_prefix + 'user_filter') search_filter = "(&%s%s)" % ( - auth_search_filter, - user_filter - ) + auth_search_filter, + user_filter + ) _results = self._search( - user_base_dn, - filterstr=search_filter, - attrlist=[ 'dn' ], - override_search='_regular_search' - ) + user_base_dn, + filterstr=search_filter, + attrlist=['dn'], + override_search='_regular_search' + ) if len(_results) == 1: (_user_dn, _user_attrs) = _results[0] @@ -2469,10 +2483,10 @@ if len(login.split('@')) < 2: search_filter = "(uid=%s)" % (login) _results = self._search( - domain, - filterstr=search_filter, - attrlist=[ 'dn' ] - ) + domain, + filterstr=search_filter, + attrlist=['dn'] + ) if len(_results) == 1: (_user_dn, _user_attrs) = _results[0] @@ -2485,53 +2499,58 @@ return _user_dn def _kolab_domain_root_dn(self, domain): - log.debug(_("Searching root dn for domain %r") % (domain), level=8) + log.debug(_l("Searching root dn for domain %r") % (domain), level=8) if not hasattr(self, 'domain_rootdns'): self.domain_rootdns = {} - if self.domain_rootdns.has_key(domain): - log.debug(_("Returning from cache: %r") % (self.domain_rootdns[domain]), level=8) + if domain in self.domain_rootdns: + log.debug(_l("Returning from cache: %r") % (self.domain_rootdns[domain]), level=8) return self.domain_rootdns[domain] self._bind() - log.debug(_("Finding domain root dn for domain %s") % (domain), level=8) + log.debug(_l("Finding domain root dn for domain %s") % (domain), level=8) domain_base_dn = conf.get('ldap', 'domain_base_dn', quiet=True) domain_filter = conf.get('ldap', 'domain_filter') - if not domain_filter == None: - if not domain == None: + if domain_filter is not None: + if domain is not None: domain_filter = domain_filter.replace('*', domain) if not domain_base_dn == "": _results = self._search( - domain_base_dn, - ldap.SCOPE_SUBTREE, - domain_filter, - override_search='_regular_search' - ) - - domains = [] + domain_base_dn, + ldap.SCOPE_SUBTREE, + domain_filter, + override_search='_regular_search' + ) for _domain in _results: (domain_dn, _domain_attrs) = _domain domain_rootdn_attribute = conf.get( - 'ldap', - 'domain_rootdn_attribute' - ) + 'ldap', + 'domain_rootdn_attribute' + ) _domain_attrs = utils.normalize(_domain_attrs) - if _domain_attrs.has_key(domain_rootdn_attribute): - log.debug(_("Setting domain root dn from LDAP for domain %r: %r") % (domain, _domain_attrs[domain_rootdn_attribute]), level=8) + if domain_rootdn_attribute in _domain_attrs: + log.debug( + _l("Setting domain root dn from LDAP for domain %r: %r") % ( + domain, + _domain_attrs[domain_rootdn_attribute] + ), + level=8 + ) + self.domain_rootdns[domain] = _domain_attrs[domain_rootdn_attribute] return _domain_attrs[domain_rootdn_attribute] else: 
domain_name_attribute = self.config_get('domain_name_attribute') - if domain_name_attribute == None: + if domain_name_attribute is None: domain_name_attribute = 'associateddomain' if isinstance(_domain_attrs[domain_name_attribute], list): @@ -2554,11 +2573,11 @@ _filter = "(|" for _type in ['user', 'group', 'resource', 'sharedfolder']: __filter = self.config_get('kolab_%s_filter' % (_type)) - if __filter == None: + if __filter is None: __filter = self.config_get('%s_filter' % (_type)) - if not __filter == None: - _filter = "%s%s" % (_filter,__filter) + if __filter is not None: + _filter = "%s%s" % (_filter, __filter) _filter = "%s)" % (_filter) @@ -2575,7 +2594,7 @@ This function should only be called by the primary instance of Auth. """ - log.debug(_("Listing domains..."), level=8) + log.debug(_l("Listing domains..."), level=8) self.connect() self._bind() @@ -2590,27 +2609,28 @@ # If we haven't returned already, let's continue searching domain_filter = conf.get('ldap', 'domain_filter') - if not domain == None: + if domain is not None: domain_filter = domain_filter.replace('*', domain) - if domain_base_dn == None or domain_filter == None: + if domain_base_dn is None or domain_filter is None: return [] dna = self.config_get('domain_name_attribute') - if dna == None: + if dna is None: dna = 'associateddomain' try: _search = self._search( - domain_base_dn, - ldap.SCOPE_SUBTREE, - domain_filter, - # TODO: Where we use associateddomain is actually - # configurable - [ dna ], - override_search='_regular_search' - ) - except: + domain_base_dn, + ldap.SCOPE_SUBTREE, + domain_filter, + # TODO: Where we use associateddomain is actually + # configurable + [dna], + override_search='_regular_search' + ) + + except Exception: return [] domains = [] @@ -2628,7 +2648,7 @@ else: primary_domain = domain_attrs[dna].lower() - domains.append((primary_domain,secondary_domains)) + domains.append((primary_domain, secondary_domains)) return domains @@ -2642,28 +2662,37 @@ """ log.debug( - "auth.ldap.LDAP._synchronize_callback(args %r, kw %r)" % ( - args, - kw - ), - level=8 - ) + "auth.ldap.LDAP._synchronize_callback(args %r, kw %r)" % ( + args, + kw + ), + level=8 + ) # Typical for Persistent Change Control EntryChangeNotification - if kw.has_key('change_type'): - - log.debug(_("change_type defined, typical for Persistent Change Control EntryChangeNotification"), level=5) - - change_type = None + if 'change_type' in kw: + log.debug( + _l( + "change_type defined, typical for Persistent Change " + + "Control EntryChangeNotification" + ), + level=5 + ) change_dict = { - 'change_type': kw['change_type'], - 'previous_dn': kw['previous_dn'], - 'change_number': kw['change_number'], - 'dn': kw['dn'] - } + 'change_type': kw['change_type'], + 'previous_dn': kw['previous_dn'], + 'change_number': kw['change_number'], + 'dn': kw['dn'] + } entry = utils.normalize(kw['entry']) + + # Ignore nstombstone objects + if 'objectclass' in entry: + if 'nstombstone' in entry['objectclass']: + return None + entry['dn'] = kw['dn'] unique_attr = self.config_get('unique_attribute') @@ -2671,12 +2700,12 @@ try: entry['type'] = self._entry_type(entry) - except: + except Exception: entry['type'] = None - log.debug(_("Entry type: %s") % (entry['type']), level=8) + log.debug(_l("Entry type: %s") % (entry['type']), level=8) - if change_dict['change_type'] == None: + if change_dict['change_type'] is None: # This entry was in the start result set eval("self._change_none_%s(entry, change_dict)" % (entry['type'])) else: @@ -2689,43 +2718,47 @@ # See 
if we can find the cache entry - this way we can get to # the value of a (former, on a deleted entry) result_attribute result_attribute = conf.get('cyrus-sasl', 'result_attribute') - if not entry.has_key(result_attribute): + if result_attribute not in entry: cache_entry = cache.get_entry(self.domain, entry, update=False) if hasattr(cache_entry, 'result_attribute') and change == 'delete': entry[result_attribute] = cache_entry.result_attribute eval( - "self._change_%s_%s(entry, change_dict)" % ( - change, - entry['type'] - ) + "self._change_%s_%s(entry, change_dict)" % ( + change, + entry['type'] ) + ) # Typical for Paged Results Control - elif kw.has_key('entry') and isinstance(kw['entry'], list): + elif 'entry' in kw and isinstance(kw['entry'], list): + log.debug(_l("No change_type, typical for Paged Results Control"), level=5) - log.debug(_("No change_type, typical for Paged Results Control"), level=5) - - for entry_dn,entry_attrs in kw['entry']: + for entry_dn, entry_attrs in kw['entry']: # This is a referral - if entry_dn == None: + if entry_dn is None: continue - entry = { 'dn': entry_dn } + entry = {'dn': entry_dn} entry_attrs = utils.normalize(entry_attrs) for attr in entry_attrs.keys(): entry[attr.lower()] = entry_attrs[attr] + # Ignore nstombstone objects + if 'objectclass' in entry: + if 'nstombstone' in entry['objectclass']: + return None + unique_attr = self.config_get('unique_attribute').lower() entry['id'] = entry[unique_attr] try: entry['type'] = self._entry_type(entry) - except: + except Exception: entry['type'] = "unknown" - log.debug(_("Entry type for dn: %s is: %s") % (entry['dn'], entry['type']), level=8) + log.debug(_l("Entry type for dn: %s is: %s") % (entry['dn'], entry['type']), level=8) eval("self._change_none_%s(entry, None)" % (entry['type'])) @@ -2733,7 +2766,7 @@ # # rcpt_addrs = self.recipient_policy(entry) # -# log.debug(_("Recipient Addresses: %r") % (rcpt_addrs), level=8) +# log.debug(_l("Recipient Addresses: %r") % (rcpt_addrs), level=8) # # for key in rcpt_addrs.keys(): # entry[key] = rcpt_addrs[key] @@ -2750,67 +2783,67 @@ # server = self.imap.user_mailbox_server(folder) ### - ### Backend search functions + # Backend search functions ### - def _persistent_search(self, - base_dn, - scope=ldap.SCOPE_SUBTREE, - filterstr="(objectClass=*)", - attrlist=None, - attrsonly=0, - timeout=-1, - callback=False, - primary_domain=None, - secondary_domains=[] - ): - - _results = [] + def _persistent_search( + self, + base_dn, + scope=ldap.SCOPE_SUBTREE, + filterstr="(objectClass=*)", + attrlist=None, + attrsonly=0, + timeout=-1, + callback=False, + primary_domain=None, + secondary_domains=[] + ): psearch_server_controls = [] - psearch_server_controls.append(psearch.PersistentSearchControl( - criticality=True, - changeTypes=[ 'add', 'delete', 'modify', 'modDN' ], - changesOnly=False, - returnECs=True - ) + psearch_server_controls.append( + ldap.controls.psearch.PersistentSearchControl( + criticality=True, + changeTypes=['add', 'delete', 'modify', 'modDN'], + changesOnly=False, + returnECs=True ) + ) _search = self.ldap.search_ext( - base_dn, - scope=scope, - filterstr=filterstr, - attrlist=attrlist, - attrsonly=attrsonly, - timeout=timeout, - serverctrls=psearch_server_controls - ) + base_dn, + scope=scope, + filterstr=filterstr, + attrlist=attrlist, + attrsonly=attrsonly, + timeout=timeout, + serverctrls=psearch_server_controls + ) ecnc = psearch.EntryChangeNotificationControl while True: - res_type,res_data,res_msgid,_None,_None,_None = self.ldap.result4( - _search, - 
all=0, - add_ctrls=1, - add_intermediates=1, - resp_ctrl_classes={ecnc.controlType:ecnc} - ) + res_type, res_data, res_msgid, _None, _None, _None = self.ldap.result4( + _search, + all=0, + add_ctrls=1, + add_intermediates=1, + resp_ctrl_classes={ecnc.controlType: ecnc} + ) change_type = None previous_dn = None change_number = None - for dn,entry,srv_ctrls in res_data: - log.debug(_("LDAP Search Result Data Entry:"), level=8) + for dn, entry, srv_ctrls in res_data: + log.debug(_l("LDAP Search Result Data Entry:"), level=8) log.debug(" DN: %r" % (dn), level=8) log.debug(" Entry: %r" % (entry), level=8) ecn_ctrls = [ - c for c in srv_ctrls - if c.controlType == ecnc.controlType - ] + c for c in srv_ctrls + if c.controlType == ecnc.controlType + ] if ecn_ctrls: change_type = ecn_ctrls[0].changeType @@ -2819,76 +2852,76 @@ change_type_desc = psearch.CHANGE_TYPES_STR[change_type] log.debug( - _("Entry Change Notification attributes:"), - level=8 - ) + _l("Entry Change Notification attributes:"), + level=8 + ) log.debug( - " " + _("Change Type: %r (%r)") % ( - change_type, - change_type_desc - ), - level=8 - ) + " " + _l("Change Type: %r (%r)") % ( + change_type, + change_type_desc + ), + level=8 + ) log.debug( - " " + _("Previous DN: %r") % (previous_dn), - level=8 - ) + " " + _l("Previous DN: %r") % (previous_dn), + level=8 + ) if callback: callback( - dn=dn, - entry=entry, - previous_dn=previous_dn, - change_type=change_type, - change_number=change_number, - primary_domain=primary_domain, - secondary_domains=secondary_domains - ) - - def _paged_search(self, - base_dn, - scope=ldap.SCOPE_SUBTREE, - filterstr="(objectClass=*)", - attrlist=None, - attrsonly=0, - timeout=-1, - callback=False, - primary_domain=None, - secondary_domains=[] - ): + dn=dn, + entry=entry, + previous_dn=previous_dn, + change_type=change_type, + change_number=change_number, + primary_domain=primary_domain, + secondary_domains=secondary_domains + ) + + def _paged_search( + self, + base_dn, + scope=ldap.SCOPE_SUBTREE, + filterstr="(objectClass=*)", + attrlist=None, + attrsonly=0, + timeout=-1, + callback=False, + primary_domain=None, + secondary_domains=[] + ): page_size = 500 - critical = True _results = [] - server_page_control = SimplePagedResultsControl(page_size=page_size) + server_page_control = ldap.controls.libldap.SimplePagedResultsControl(size=page_size) _search = self.ldap.search_ext( - base_dn, - scope=scope, - filterstr=filterstr, - attrlist=attrlist, - attrsonly=attrsonly, - serverctrls=[server_page_control] - ) + base_dn, + scope=scope, + filterstr=filterstr, + attrlist=attrlist, + attrsonly=attrsonly, + serverctrls=[server_page_control] + ) pages = 0 while True: pages += 1 try: ( - _result_type, - _result_data, - _result_msgid, - _result_controls - ) = self.ldap.result3(_search) + _result_type, + _result_data, + _result_msgid, + _result_controls + ) = self.ldap.result3(_search) - except ldap.NO_SUCH_OBJECT, e: + except ldap.NO_SUCH_OBJECT: log.warning( - _("Object %s searched no longer exists") % (base_dn) - ) + _l("Object %s searched no longer exists") % (base_dn) + ) break @@ -2900,12 +2933,12 @@ _results.extend(_result_data) if (pages % 2) == 0: - log.debug(_("%d results...") % (len(_results))) + log.debug(_l("%d results...") % (len(_results))) pctrls = [ - c for c in _result_controls - if c.controlType == LDAP_CONTROL_PAGED_RESULTS - ] + c for c in _result_controls + if c.controlType == LDAP_CONTROL_PAGED_RESULTS + ] if pctrls: if hasattr(pctrls[0], 'size'): @@ -2917,47 +2950,49 @@ if cookie: 
server_page_control.cookie = cookie _search = self.ldap.search_ext( - base_dn, - scope=scope, - filterstr=filterstr, - attrlist=attrlist, - attrsonly=attrsonly, - serverctrls=[server_page_control] - ) + base_dn, + scope=scope, + filterstr=filterstr, + attrlist=attrlist, + attrsonly=attrsonly, + serverctrls=[server_page_control] + ) else: # TODO: Error out more verbose break else: # TODO: Error out more verbose - print "Warning: Server ignores RFC 2696 control." + print("Warning: Server ignores RFC 2696 control.") break return _results - def _vlv_search(self, - base_dn, - scope=ldap.SCOPE_SUBTREE, - filterstr="(objectClass=*)", - attrlist=None, - attrsonly=0, - timeout=-1, - callback=False, - primary_domain=None, - secondary_domains=[] - ): + def _vlv_search( + self, + base_dn, + scope=ldap.SCOPE_SUBTREE, + filterstr="(objectClass=*)", + attrlist=None, + attrsonly=0, + timeout=-1, + callback=False, + primary_domain=None, + secondary_domains=[] + ): pass - def _sync_repl(self, - base_dn, - scope=ldap.SCOPE_SUBTREE, - filterstr="(objectClass=*)", - attrlist=None, - attrsonly=0, - timeout=-1, - callback=False, - primary_domain=None, - secondary_domains=[] - ): + def _sync_repl( + self, + base_dn, + scope=ldap.SCOPE_SUBTREE, + filterstr="(objectClass=*)", + attrlist=None, + attrsonly=0, + timeout=-1, + callback=False, + primary_domain=None, + secondary_domains=[] + ): import ldapurl import syncrepl @@ -2965,12 +3000,12 @@ ldap_url = ldapurl.LDAPUrl(self.config_get('ldap_uri')) ldap_sync_conn = syncrepl.DNSync( - '/var/lib/kolab/syncrepl_%s.db' % (self.domain), - ldap_url.initializeUrl(), - trace_level=2, - trace_file=pykolab.logger.StderrToLogger(log), - callback=self._synchronize_callback - ) + '/var/lib/kolab/syncrepl_%s.db' % (self.domain), + ldap_url.initializeUrl(), + trace_level=2, + trace_file=pykolab.logger.StderrToLogger(log), + callback=self._synchronize_callback + ) bind_dn = self.config_get('bind_dn') bind_pw = self.config_get('bind_pw') @@ -2978,12 +3013,12 @@ ldap_sync_conn.simple_bind_s(bind_dn, bind_pw) msgid = ldap_sync_conn.syncrepl_search( - base_dn, - scope, - mode='refreshAndPersist', - filterstr=filterstr, - attrlist=attrlist, - ) + base_dn, + scope, + mode='refreshAndPersist', + filterstr=filterstr, + attrlist=attrlist, + ) try: # Here's where returns need to be taken into account... 
@@ -2992,27 +3027,31 @@ except KeyboardInterrupt: pass - def _regular_search(self, - base_dn, - scope=ldap.SCOPE_SUBTREE, - filterstr="(objectClass=*)", - attrlist=None, - attrsonly=0, - timeout=-1, - callback=False, - primary_domain=None, - secondary_domains=[] - ): + def _regular_search( + self, + base_dn, + scope=ldap.SCOPE_SUBTREE, + filterstr="(objectClass=*)", + attrlist=None, + attrsonly=0, + timeout=None, + callback=False, + primary_domain=None, + secondary_domains=[] + ): - log.debug(_("Searching with filter %r") % (filterstr), level=8) + if timeout is None: + timeout = self.config_get('ldap', 'timeout', 10) + + log.debug(_l("Searching with filter %r") % (filterstr), level=8) _search = self.ldap.search( - base_dn, - scope=scope, - filterstr=filterstr, - attrlist=attrlist, - attrsonly=attrsonly - ) + base_dn, + scope=scope, + filterstr=filterstr, + attrlist=attrlist, + attrsonly=attrsonly + ) _results = [] _result_type = None @@ -3020,24 +3059,25 @@ while not _result_type == ldap.RES_SEARCH_RESULT: (_result_type, _result) = self.ldap.result(_search, False, 0) - if not _result == None: + if _result is not None: for result in _result: _results.append(result) return _results - def _search(self, - base_dn, - scope=ldap.SCOPE_SUBTREE, - filterstr="(objectClass=*)", - attrlist=None, - attrsonly=0, - timeout=-1, - override_search=False, - callback=False, - primary_domain=None, - secondary_domains=[] - ): + def _search( + self, + base_dn, + scope=ldap.SCOPE_SUBTREE, + filterstr="(objectClass=*)", + attrlist=None, + attrsonly=0, + timeout=None, + override_search=False, + callback=False, + primary_domain=None, + secondary_domains=[] + ): """ Search LDAP. @@ -3045,50 +3085,54 @@ the first one supported. """ + if timeout is None: + timeout = self.config_get('timeout', default=10) + supported_controls = conf.get_list('ldap', 'supported_controls') - if not supported_controls == None and not len(supported_controls) < 1: + if supported_controls is not None and not len(supported_controls) < 1: for control_num in [(int)(x) for x in supported_controls]: self.ldap.supported_controls.append( - SUPPORTED_LDAP_CONTROLS[control_num]['func'] - ) + SUPPORTED_LDAP_CONTROLS[control_num]['func'] + ) if len(self.ldap.supported_controls) < 1: for control_num in SUPPORTED_LDAP_CONTROLS.keys(): log.debug( - _("Checking for support for %s on %s") % ( - SUPPORTED_LDAP_CONTROLS[control_num]['desc'], - self.domain - ), - level=8 - ) + _l("Checking for support for %s on %s") % ( + SUPPORTED_LDAP_CONTROLS[control_num]['desc'], + self.domain + ), + level=8 + ) _search = self.ldap.search_s( - '', - scope=ldap.SCOPE_BASE, - attrlist=['supportedControl'] - ) + '', + scope=ldap.SCOPE_BASE, + attrlist=['supportedControl'] + ) - for (_result,_supported_controls) in _search: + for (_result, _supported_controls) in _search: supported_controls = _supported_controls.values()[0] for control_num in SUPPORTED_LDAP_CONTROLS.keys(): if SUPPORTED_LDAP_CONTROLS[control_num]['oid'] in \ supported_controls: - log.debug(_("Found support for %s") % ( - SUPPORTED_LDAP_CONTROLS[control_num]['desc'], - ), - level=8 - ) + log.debug( + _l("Found support for %s") % ( + SUPPORTED_LDAP_CONTROLS[control_num]['desc'], + ), + level=8 + ) self.ldap.supported_controls.append( - SUPPORTED_LDAP_CONTROLS[control_num]['func'] - ) + SUPPORTED_LDAP_CONTROLS[control_num]['func'] + ) _results = [] - if not override_search == False: - _use_ldap_controls = [ override_search ] + if override_search is not False: + _use_ldap_controls = [override_search] else: 
_use_ldap_controls = self.ldap.supported_controls @@ -3101,7 +3145,8 @@ while not failed_ok: try: - exec("""_results = self.%s( + exec( + """_results = self.%s( %r, scope=%r, filterstr=%r, @@ -3112,33 +3157,43 @@ primary_domain=%r, secondary_domains=%r )""" % ( - supported_control, - base_dn, - scope, - filterstr, - attrlist, - attrsonly, - timeout, - primary_domain, - secondary_domains - ) + supported_control, + base_dn, + scope, + filterstr, + attrlist, + attrsonly, + timeout, + primary_domain, + secondary_domains ) + ) break - except ldap.SERVER_DOWN, errmsg: - log.error(_("LDAP server unavailable: %r") % (errmsg)) - log.error(_("%s") % (traceback.format_exc())) - log.error(_("-- reconnecting in 10 seconds.")) + except ldap.SERVER_DOWN as errmsg: + log.error(_l("LDAP server unavailable: %r") % (errmsg)) + log.error(_l("%s") % (traceback.format_exc())) + log.error(_l("-- reconnecting in 10 seconds.")) + + self._disconnect() + + time.sleep(10) + self.reconnect() + + except ldap.TIMEOUT: + log.error(_l("LDAP timeout in searching for '%s'") % (filterstr)) + + self._disconnect() time.sleep(10) self.reconnect() - except Exception, errmsg: + except Exception as errmsg: failed_ok = True - log.error(_("An error occured using %s: %r") % (supported_control, errmsg)) - log.error(_("%s") % (traceback.format_exc())) + log.error(_l("An error occured using %s: %r") % (supported_control, errmsg)) + log.error(_l("%s") % (traceback.format_exc())) continue @@ -3153,7 +3208,7 @@ if acl is not None: if not isinstance(acl, list): - acl = [ acl ] + acl = [acl] for acl_entry in acl: # entry already converted to IMAP format?
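
Much of the churn in the hunks above is a Python 2-to-3 compatibility pass: `dict.has_key()` becomes the `in` operator, `except Exception, errmsg` becomes `except Exception as errmsg`, `basestring` gives way to `string_types`, and the `eval("self._change_add_%s(entry, change)" % _type)` dispatch is replaced by `getattr()`. A minimal sketch of that dispatch pattern follows; the `Handler` class and its `_change_add_*` methods are illustrative stand-ins, not the real pykolab `LDAP` class:

```python
# Minimal sketch of the eval() -> getattr() dispatch replacement.
# Handler and its _change_add_* methods are illustrative stand-ins only.
class Handler(object):
    def _change_add_user(self, entry, change):
        print("adding user %r" % entry.get('dn'))

    def _change_add_group(self, entry, change):
        print("adding group %r" % entry.get('dn'))

    def dispatch(self, entry, change):
        success = None

        for _type in ['user', 'group', 'role', 'sharedfolder']:
            try:
                # getattr() raises AttributeError for unknown types; the broad
                # except turns that into "try the next type", as eval() did.
                func = getattr(self, '_change_add_%s' % _type)
                func(entry, change)
                success = True
            except Exception:
                success = False

            if success:
                break

        return success


Handler().dispatch({'dn': 'uid=doe,ou=People,dc=example,dc=org'}, None)
```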
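
The same file also updates the RFC 2696 paged-results search: the python-ldap 2.3 `SimplePagedResultsControl(page_size=...)` call becomes the 2.4 constructor taking `size=` and `cookie=`, and the loop re-issues `search_ext()` with the cookie returned by the server. A hedged sketch of that loop against a generic directory; the URI, base DN and bind credentials are placeholders, not pykolab configuration:

```python
# Hedged sketch: RFC 2696 paged search with python-ldap >= 2.4.
# LDAP_URI, BASE_DN and the bind credentials are placeholders.
import ldap
from ldap.controls.libldap import SimplePagedResultsControl

LDAP_URI = 'ldap://localhost'
BASE_DN = 'dc=example,dc=org'
PAGE_SIZE = 500


def paged_search(filterstr='(objectClass=*)', attrlist=None):
    conn = ldap.initialize(LDAP_URI)
    conn.simple_bind_s('cn=Directory Manager', 'secret')

    page_control = SimplePagedResultsControl(criticality=True, size=PAGE_SIZE, cookie='')
    results = []

    while True:
        msgid = conn.search_ext(
            BASE_DN,
            ldap.SCOPE_SUBTREE,
            filterstr,
            attrlist,
            serverctrls=[page_control]
        )

        _rtype, rdata, _rmsgid, serverctrls = conn.result3(msgid)
        results.extend(rdata)

        # Pick the paging control the server sent back and read its cookie.
        pctrls = [
            c for c in serverctrls
            if c.controlType == SimplePagedResultsControl.controlType
        ]

        if not pctrls or not pctrls[0].cookie:
            # Either the server ignores RFC 2696 or this was the last page.
            break

        page_control.cookie = pctrls[0].cookie

    return results
```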
View file
pykolab-0.8.15.tar.gz/pykolab/auth/ldap/auth_cache.py -> pykolab-0.8.16.tar.gz/pykolab/auth/ldap/auth_cache.py
Changed
@@ -25,30 +25,18 @@ from sqlalchemy import Integer from sqlalchemy import MetaData from sqlalchemy import String -from sqlalchemy import Table from sqlalchemy import Text -from sqlalchemy import desc from sqlalchemy import create_engine from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm import mapper -try: - from sqlalchemy.orm import relationship -except: - from sqlalchemy.orm import relation as relationship - -try: - from sqlalchemy.orm import sessionmaker -except: - from sqlalchemy.orm import create_session +from sqlalchemy.orm import sessionmaker import pykolab -from pykolab import utils from pykolab.constants import KOLAB_LIB_PATH -from pykolab.translate import _ +# pylint: disable=invalid-name conf = pykolab.getConf() log = pykolab.getLogger('pykolab.auth_cache') @@ -56,12 +44,19 @@ db = None -## -## Classes -## +try: + unicode('') +except NameError: + unicode = str + +# +# Classes +# DeclarativeBase = declarative_base() + +# pylint: disable=too-few-public-methods class Entry(DeclarativeBase): __tablename__ = 'entries' @@ -78,39 +73,45 @@ else: self.value = value -## -## Functions -## +# +# Functions +# + def del_entry(key): + # pylint: disable=global-statement + global db + db = init_db() try: - _entries = db.query(Entry).filter_by(key=key).delete() - except sqlalchemy.exc.OperationalError, errmsg: + db.query(Entry).filter_by(key=key).delete() + except sqlalchemy.exc.OperationalError: db = init_db(reinit=True) - except sqlalchemy.exc.InvalidRequest, errmsg: + except sqlalchemy.exc.InvalidRequest: db = init_db(reinit=True) finally: - _entries = db.query(Entry).filter_by(key=key).delete() + db.query(Entry).filter_by(key=key).delete() db.commit() + def get_entry(key): + # pylint: disable=global-statement + global db + db = init_db() try: _entries = db.query(Entry).filter_by(key=key).all() - except sqlalchemy.exc.OperationalError, errmsg: + except sqlalchemy.exc.OperationalError: db = init_db(reinit=True) - except sqlalchemy.exc.InvalidRequest, errmsg: + except sqlalchemy.exc.InvalidRequest: db = init_db(reinit=True) finally: _entries = db.query(Entry).filter_by(key=key).all() - if len(_entries) == 0: - return None - if len(_entries) > 1: + if len(_entries) != 1: return None log.debug("Entry found: %r" % (_entries[0].__dict__)) @@ -118,26 +119,23 @@ return _entries[0].value.encode('utf-8', 'latin1') + def set_entry(key, value): db = init_db() + try: _entries = db.query(Entry).filter_by(key=key).all() - except sqlalchemy.exc.OperationalError, errmsg: + except sqlalchemy.exc.OperationalError: db = init_db(reinit=True) - except sqlalchemy.exc.InvalidRequest, errmsg: + except sqlalchemy.exc.InvalidRequest: db = init_db(reinit=True) finally: _entries = db.query(Entry).filter_by(key=key).all() - if len(_entries) == 0: - db.add( - Entry( - key, - value - ) - ) - + if not _entries: + db.add(Entry(key, value)) db.commit() + elif len(_entries) == 1: if not isinstance(value, unicode): value = unicode(value, 'utf-8') @@ -148,21 +146,28 @@ _entries[0].last_change = datetime.datetime.now() db.commit() + def purge_entries(db): - db.query(Entry).filter(Entry.last_change <= (datetime.datetime.now() - datetime.timedelta(1))).delete() + db.query(Entry).filter( + Entry.last_change <= (datetime.datetime.now() - datetime.timedelta(1)) + ).delete() + db.commit() + def init_db(reinit=False): """ Returns a SQLAlchemy Session() instance. 
""" + # pylint: disable=global-statement global db - if not db == None and not reinit: + if db is not None and not reinit: return db db_uri = conf.get('ldap', 'auth_cache_uri') - if db_uri == None: + + if db_uri is None: db_uri = 'sqlite:///%s/auth_cache.db' % (KOLAB_LIB_PATH) if reinit:
View file
pykolab-0.8.15.tar.gz/pykolab/auth/ldap/cache.py -> pykolab-0.8.16.tar.gz/pykolab/auth/ldap/cache.py
Changed
@@ -18,6 +18,8 @@ import datetime +from uuid import UUID + import sqlalchemy from sqlalchemy import Column @@ -25,42 +27,44 @@ from sqlalchemy import Integer from sqlalchemy import MetaData from sqlalchemy import String -from sqlalchemy import Table from sqlalchemy import desc from sqlalchemy import create_engine -from sqlalchemy.orm import mapper - -from uuid import UUID +from sqlalchemy.ext.declarative import declarative_base -try: - from sqlalchemy.orm import relationship -except: - from sqlalchemy.orm import relation as relationship - -try: - from sqlalchemy.orm import sessionmaker -except: - from sqlalchemy.orm import create_session +from sqlalchemy.orm import sessionmaker import pykolab -from pykolab import utils from pykolab.constants import KOLAB_LIB_PATH from pykolab.translate import _ +# pylint: disable=invalid-name conf = pykolab.getConf() -log = pykolab.getLogger('pykolab.auth_cache') +log = pykolab.getLogger('pykolab.cache') metadata = MetaData() db = {} -## -## Classes -## +# +# Classes +# + +DeclarativeBase = declarative_base() + + +# pylint: disable=too-few-public-methods +class Entry(DeclarativeBase): + __tablename__ = 'entries' + + last_change = None + + id = Column(Integer, primary_key=True) + uniqueid = Column(String(128), nullable=False) + result_attribute = Column(String(128), nullable=False) + last_change = Column(DateTime, nullable=False, default=datetime.datetime.now()) -class Entry(object): def __init__(self, uniqueid, result_attr, last_change): self.uniqueid = uniqueid self.result_attribute = result_attr @@ -72,83 +76,64 @@ ).replace('%%', '%') self.last_change = datetime.datetime.strptime( - last_change, + last_change, modifytimestamp_format ) -## -## Tables -## - -entry_table = Table( - 'entry', metadata, - Column('id', Integer, primary_key=True), - Column('uniqueid', String(128), nullable=False), - Column('result_attribute', String(128), nullable=False), - Column('last_change', DateTime), - ) - -## -## Table <-> Class Mappers -## -mapper(Entry, entry_table) +# +# Functions +# -## -## Functions -## def delete_entry(domain, entry): - result_attribute = conf.get_raw('cyrus-sasl', 'result_attribute') + _db = init_db(domain) + _entry = _db.query(Entry).filter_by(uniqueid=entry['id']).first() - db = init_db(domain) - _entry = db.query(Entry).filter_by(uniqueid=entry['id']).first() + if _entry is not None: + _db.delete(_entry) + _db.commit() - if not _entry == None: - db.delete(_entry) - db.commit() def get_entry(domain, entry, update=True): result_attribute = conf.get_raw('cyrus-sasl', 'result_attribute') _entry = None - db = init_db(domain) + _db = init_db(domain) try: _uniqueid = str(UUID(bytes_le=entry['id'])) - log.debug(_("Entry uniqueid was converted from binary form to string: %s") % _uniqueid, level=8) + log.debug( + _("Entry uniqueid was converted from binary form to string: %s") % _uniqueid, + level=8 + ) + except ValueError: _uniqueid = entry['id'] try: - _entry = db.query(Entry).filter_by(uniqueid=_uniqueid).first() - except sqlalchemy.exc.OperationalError, errmsg: - db = init_db(domain,reinit=True) - except sqlalchemy.exc.InvalidRequestError, errmsg: - db = init_db(domain,reinit=True) + _entry = _db.query(Entry).filter_by(uniqueid=_uniqueid).first() + except sqlalchemy.exc.OperationalError: + _db = init_db(domain, reinit=True) + except sqlalchemy.exc.InvalidRequestError: + _db = init_db(domain, reinit=True) finally: - _entry = db.query(Entry).filter_by(uniqueid=_uniqueid).first() + _entry = _db.query(Entry).filter_by(uniqueid=_uniqueid).first() if not 
update: return _entry - if _entry == None: + if _entry is None: log.debug(_("Inserting cache entry %r") % (_uniqueid), level=8) - if not entry.has_key(result_attribute): + if result_attribute not in entry: entry[result_attribute] = '' - db.add( - Entry( - _uniqueid, - entry[result_attribute], - entry['modifytimestamp'] - ) - ) + _db.add(Entry(_uniqueid, entry[result_attribute], entry['modifytimestamp'])) - db.commit() - _entry = db.query(Entry).filter_by(uniqueid=_uniqueid).first() + _db.commit() + _entry = _db.query(Entry).filter_by(uniqueid=_uniqueid).first() else: modifytimestamp_format = conf.get_raw( 'ldap', @@ -158,24 +143,30 @@ if not _entry.last_change.strftime(modifytimestamp_format) == entry['modifytimestamp']: log.debug(_("Updating timestamp for cache entry %r") % (_uniqueid), level=8) - last_change = datetime.datetime.strptime(entry['modifytimestamp'], modifytimestamp_format) + last_change = datetime.datetime.strptime( + entry['modifytimestamp'], + modifytimestamp_format + ) + _entry.last_change = last_change - db.commit() - _entry = db.query(Entry).filter_by(uniqueid=_uniqueid).first() + _db.commit() + _entry = _db.query(Entry).filter_by(uniqueid=_uniqueid).first() - if entry.has_key(result_attribute): + if result_attribute in entry: if not _entry.result_attribute == entry[result_attribute]: log.debug(_("Updating result_attribute for cache entry %r") % (_uniqueid), level=8) _entry.result_attribute = entry[result_attribute] - db.commit() - _entry = db.query(Entry).filter_by(uniqueid=_uniqueid).first() + _db.commit() + _entry = _db.query(Entry).filter_by(uniqueid=_uniqueid).first() return _entry -def init_db(domain,reinit=False): + +def init_db(domain, reinit=False): """ Returns a SQLAlchemy Session() instance. """ + # pylint: disable=global-statement global db if domain in db and not reinit: @@ -191,9 +182,10 @@ try: engine = create_engine(db_uri, echo=echo) - metadata.create_all(engine) - except: + DeclarativeBase.metadata.create_all(engine) + except Exception: engine = create_engine('sqlite://') + DeclarativeBase.metadata.create_all(engine) metadata.create_all(engine) Session = sessionmaker(bind=engine) @@ -201,6 +193,7 @@ return db[domain] + def last_modify_timestamp(domain): modifytimestamp_format = conf.get_raw( 'ldap', @@ -209,12 +202,13 @@ ).replace('%%', '%') try: - db = init_db(domain) - last_change = db.query(Entry).order_by(desc(Entry.last_change)).first() + _db = init_db(domain) + last_change = _db.query(Entry).order_by(desc(Entry.last_change)).first() - if not last_change == None: + if last_change is not None: return last_change.last_change.strftime(modifytimestamp_format) - else: - return datetime.datetime(1900, 01, 01, 00, 00, 00).strftime(modifytimestamp_format) - except: - return datetime.datetime(1900, 01, 01, 00, 00, 00).strftime(modifytimestamp_format) + + return datetime.datetime(1900, 1, 1, 00, 00, 00).strftime(modifytimestamp_format) + + except Exception: + return datetime.datetime(1900, 1, 1, 00, 00, 00).strftime(modifytimestamp_format)
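Besides the pylint-driven cleanup, cache.py now imports uuid.UUID directly and keeps the existing trick of turning a binary entry id into its string form, falling back to the raw value on ValueError. A standalone sketch of that conversion (function name and sample values are illustrative):

from uuid import UUID


def uniqueid_to_str(raw_id):
    # Binary ids (16 bytes, little-endian field layout) become canonical
    # UUID strings; anything else is passed through unchanged, mirroring
    # the ValueError fallback in cache.get_entry().
    try:
        return str(UUID(bytes_le=raw_id))
    except ValueError:
        return raw_id


print(uniqueid_to_str(b'\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10'))
# -> '04030201-0605-0807-090a-0b0c0d0e0f10'

print(uniqueid_to_str('e1a2b3c4-d5f6-11e9-8000-3c970e0ab8ff'))
# -> returned unchanged, the id was already a string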
View file
pykolab-0.8.15.tar.gz/pykolab/base.py -> pykolab-0.8.16.tar.gz/pykolab/base.py
Changed
@@ -19,14 +19,16 @@ import pykolab from pykolab.imap import IMAP +# pylint: disable=invalid-name conf = pykolab.getConf() -class Base(object): + +class Base: """ Abstraction class for functions commonly shared between auth, imap, etc. """ def __init__(self, *args, **kw): - if kw.has_key('domain') and not kw['domain'] == None: + if 'domain' in kw and kw['domain'] is not None: self.domain = kw['domain'] else: self.domain = conf.get('kolab', 'primary_domain') @@ -38,9 +40,9 @@ self.imap = IMAP() self.domain_rootdns = {} - def config_get(self, key1, key2=None): - if not key2 == None: - return conf.get(key1, key2) + def config_get(self, key1, key2=None, default=None): + if key2 is not None: + return conf.get(key1, key2, default=default) if conf.has_option(self.domain, key1): return conf.get(self.domain, key1) @@ -55,11 +57,11 @@ if conf.has_option('kolab', key1): return conf.get('kolab', key1) - return None + return default - def config_get_list(self, key1, key2=None): - if not key2 == None: - return conf.get_list(key1, key2) + def config_get_list(self, key1, key2=None, default=None): + if key2 is not None: + return conf.get_list(key1, key2, default=default) if conf.has_option(self.domain, key1): return conf.get_list(self.domain, key1) @@ -74,11 +76,11 @@ if conf.has_option('kolab', key1): return conf.get_list('kolab', key1) - return None + return default - def config_get_raw(self, key1, key2=None): - if not key2 == None: - return conf.get_raw(key1, key2) + def config_get_raw(self, key1, key2=None, default=None): + if key2 is not None: + return conf.get_raw(key1, key2, default=default) if conf.has_option(self.domain, key1): return conf.get_raw(self.domain, key1) @@ -93,5 +95,4 @@ if conf.has_option('kolab', key1): return conf.get_raw('kolab', key1) - return None - + return default
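The config_get*/config_get_list helpers gain a default argument that is passed through to conf.get() and returned instead of a hard-coded None when nothing matches. A simplified, self-contained version of that lookup order (the real method also consults backend-specific sections between the domain section and [kolab], which this sketch omits):

try:
    from configparser import ConfigParser                       # Python 3
except ImportError:
    from ConfigParser import SafeConfigParser as ConfigParser   # Python 2


def config_get(cfg, domain, key, default=None):
    # Domain-specific section first, [kolab] as the catch-all, then the
    # caller-supplied default instead of the old hard-coded None.
    for section in (domain, 'kolab'):
        if cfg.has_section(section) and cfg.has_option(section, key):
            return cfg.get(section, key)

    return default


cfg = ConfigParser()
cfg.add_section('kolab')
cfg.set('kolab', 'primary_domain', 'example.org')

print(config_get(cfg, 'example.org', 'primary_domain'))                      # example.org
print(config_get(cfg, 'example.org', 'imap_backend', default='cyrus-imap'))  # cyrus-imap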
View file
pykolab-0.8.15.tar.gz/pykolab/conf/__init__.py -> pykolab-0.8.16.tar.gz/pykolab/conf/__init__.py
Changed
@@ -16,6 +16,7 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # +from __future__ import print_function import logging import os @@ -33,6 +34,7 @@ log = pykolab.getLogger('pykolab.conf') + class Conf(object): def __init__(self): """ @@ -53,7 +55,7 @@ try: from pykolab.conf.entitlement import Entitlement entitlements = True - except: + except Exception: entitlements = False pass @@ -68,7 +70,7 @@ # Create the options self.create_options() - def finalize_conf(self,fatal=True): + def finalize_conf(self, fatal=True): self.create_options_from_plugins() self.parse_options(fatal=fatal) @@ -84,8 +86,15 @@ # But, they should be available in our class as well for option in self.defaults.__dict__.keys(): - log.debug(_("Setting %s to %r (from defaults)") % (option, self.defaults.__dict__[option]), level=8) - setattr(self,option,self.defaults.__dict__[option]) + log.debug( + _("Setting %s to %r (from defaults)") % ( + option, + self.defaults.__dict__[option] + ), + level=8 + ) + + setattr(self, option, self.defaults.__dict__[option]) # This is where we check our parser for the defaults being set there. self.set_defaults_from_cli_options() @@ -93,21 +102,41 @@ self.options_set_from_config() # Also set the cli options - if hasattr(self,'cli_keywords') and not self.cli_keywords == None: + if hasattr(self, 'cli_keywords') and self.cli_keywords is not None: for option in self.cli_keywords.__dict__.keys(): retval = False if hasattr(self, "check_setting_%s" % (option)): - exec("retval = self.check_setting_%s(%r)" % (option, self.cli_keywords.__dict__[option])) - - # The warning, error or confirmation dialog is in the check_setting_%s() function + exec( + "retval = self.check_setting_%s(%r)" % ( + option, + self.cli_keywords.__dict__[option] + ) + ) + + # The warning, error or confirmation dialog is in the check_setting_%s() + # function if not retval: continue - log.debug(_("Setting %s to %r (from CLI, verified)") % (option, self.cli_keywords.__dict__[option]), level=8) - setattr(self,option,self.cli_keywords.__dict__[option]) + log.debug( + _("Setting %s to %r (from CLI, verified)") % ( + option, + self.cli_keywords.__dict__[option] + ), + level=8 + ) + + setattr(self, option, self.cli_keywords.__dict__[option]) else: - log.debug(_("Setting %s to %r (from CLI, not checked)") % (option, self.cli_keywords.__dict__[option]), level=8) - setattr(self,option,self.cli_keywords.__dict__[option]) + log.debug( + _("Setting %s to %r (from CLI, not checked)") % ( + option, + self.cli_keywords.__dict__[option] + ), + level=8 + ) + + setattr(self, option, self.cli_keywords.__dict__[option]) def load_config(self, config): """ @@ -128,18 +157,18 @@ continue if isinstance(self.defaults.__dict__[section][key], int): - value = config.getint(section,key) + value = config.getint(section, key) elif isinstance(self.defaults.__dict__[section][key], bool): - value = config.getboolean(section,key) + value = config.getboolean(section, key) elif isinstance(self.defaults.__dict__[section][key], str): - value = config.get(section,key) + value = config.get(section, key) elif isinstance(self.defaults.__dict__[section][key], list): - value = eval(config.get(section,key)) + value = eval(config.get(section, key)) elif isinstance(self.defaults.__dict__[section][key], dict): - value = eval(config.get(section,key)) + value = eval(config.get(section, key)) - if hasattr(self,"check_setting_%s_%s" % (section,key)): - exec("retval = 
self.check_setting_%s_%s(%r)" % (section,key,value)) + if hasattr(self, "check_setting_%s_%s" % (section, key)): + exec("retval = self.check_setting_%s_%s(%r)" % (section, key, value)) if not retval: # We just don't set it, check_setting_%s should have # taken care of the error messages @@ -147,10 +176,25 @@ if not self.defaults.__dict__[section][key] == value: if key.count('password') >= 1: - log.debug(_("Setting %s_%s to '****' (from configuration file)") % (section,key), level=8) + log.debug( + _("Setting %s_%s to '****' (from configuration file)") % ( + section, + key + ), + level=8 + ) + else: - log.debug(_("Setting %s_%s to %r (from configuration file)") % (section,key,value), level=8) - setattr(self,"%s_%s" % (section,key),value) + log.debug( + _("Setting %s_%s to %r (from configuration file)") % ( + section, + key, + value + ), + level=8 + ) + + setattr(self, "%s_%s" % (section, key), value) def options_set_from_config(self): """ @@ -165,7 +209,7 @@ # Other then default? self.config_file = self.defaults.config_file - if hasattr(self,'cli_keywords') and not self.cli_keywords == None: + if hasattr(self, 'cli_keywords') and self.cli_keywords is not None: if not self.cli_keywords.config_file == self.defaults.config_file: self.config_file = self.cli_keywords.config_file @@ -185,28 +229,35 @@ retval = False if isinstance(self.defaults.__dict__['testing'][key], int): - value = config.getint('testing',key) + value = config.getint('testing', key) elif isinstance(self.defaults.__dict__['testing'][key], bool): - value = config.getboolean('testing',key) + value = config.getboolean('testing', key) elif isinstance(self.defaults.__dict__['testing'][key], str): - value = config.get('testing',key) + value = config.get('testing', key) elif isinstance(self.defaults.__dict__['testing'][key], list): - value = eval(config.get('testing',key)) + value = eval(config.get('testing', key)) elif isinstance(self.defaults.__dict__['testing'][key], dict): - value = eval(config.get('testing',key)) + value = eval(config.get('testing', key)) - if hasattr(self,"check_setting_%s_%s" % ('testing',key)): - exec("retval = self.check_setting_%s_%s(%r)" % ('testing',key,value)) + if hasattr(self, "check_setting_%s_%s" % ('testing', key)): + exec("retval = self.check_setting_%s_%s(%r)" % ('testing', key, value)) if not retval: # We just don't set it, check_setting_%s should have # taken care of the error messages continue - setattr(self,"%s_%s" % ('testing',key),value) + setattr(self, "%s_%s" % ('testing', key), value) if key.count('password') >= 1: - log.debug(_("Setting %s_%s to '****' (from configuration file)") % ('testing',key), level=8) + log.debug( + _("Setting %s_%s to '****' (from configuration file)") % ('testing', key), + level=8 + ) + else: - log.debug(_("Setting %s_%s to %r (from configuration file)") % ('testing',key,value), level=8) + log.debug( + _("Setting %s_%s to %r (from configuration file)") % ('testing', key, value), + level=8 + ) def check_config(self, val=None): """ @@ -214,7 +265,7 @@ and returns a SafeConfigParser instance if everything is OK. 
""" - if not val == None: + if val is not None: config_file = val else: config_file = self.config_file @@ -226,11 +277,13 @@ log.debug(_("Reading configuration file %s") % config_file, level=8) try: config.read(config_file) - except: + except Exception: log.error(_("Invalid configuration file %s") % config_file) if not config.has_section("kolab"): - log.warning(_("No master configuration section [kolab] in configuration file %s") % config_file) + log.warning( + _("No master configuration section [kolab] in configuration file %s") % config_file + ) return config @@ -258,58 +311,70 @@ # Enterprise Linux 5 does not have an "epilog" parameter to OptionParser try: self.cli_parser = OptionParser(epilog=epilog) - except: + except Exception: self.cli_parser = OptionParser() - ## - ## Runtime Options - ## + # + # Runtime Options + # runtime_group = self.cli_parser.add_option_group(_("Runtime Options")) - runtime_group.add_option( "-c", "--config", - dest = "config_file", - action = "store", - default = "/etc/kolab/kolab.conf", - help = _("Configuration file to use")) - - runtime_group.add_option( "-d", "--debug", - dest = "debuglevel", - type = 'int', - default = 0, - help = _("Set the debugging " + \ - "verbosity. Maximum is 9, tracing " + \ - "protocols like LDAP, SQL and IMAP.")) - - runtime_group.add_option( "-e", "--default", - dest = "answer_default", - action = "store_true", - default = False, - help = _("Use the default answer to all questions.")) - - runtime_group.add_option( "-l", - dest = "loglevel", - type = 'str', - default = "CRITICAL", - help = _("Set the logging level. " + \ - "One of info, warn, error, " + \ - "critical or debug")) - - runtime_group.add_option( "--logfile", - dest = "logfile", - action = "store", - default = "/var/log/kolab/pykolab.log", - help = _("Log file to use")) - - runtime_group.add_option( "-q", "--quiet", - dest = "quiet", - action = "store_true", - default = False, - help = _("Be quiet.")) - - runtime_group.add_option( "-y", "--yes", - dest = "answer_yes", - action = "store_true", - default = False, - help = _("Answer yes to all questions.")) + runtime_group.add_option( + "-c", "--config", + dest="config_file", + action="store", + default="/etc/kolab/kolab.conf", + help=_("Configuration file to use") + ) + + runtime_group.add_option( + "-d", "--debug", + dest="debuglevel", + type='int', + default=0, + help=_( + "Set the debugging verbosity. Maximum is 9, tracing protocols LDAP, SQL and IMAP." + ) + ) + + runtime_group.add_option( + "-e", "--default", + dest="answer_default", + action="store_true", + default=False, + help=_("Use the default answer to all questions.") + ) + + runtime_group.add_option( + "-l", + dest="loglevel", + type='str', + default="CRITICAL", + help=_("Set the logging level. One of info, warn, error, critical or debug") + ) + + runtime_group.add_option( + "--logfile", + dest="logfile", + action="store", + default="/var/log/kolab/pykolab.log", + help=_("Log file to use") + ) + + runtime_group.add_option( + "-q", "--quiet", + dest="quiet", + action="store_true", + default=False, + help=_("Be quiet.") + ) + + runtime_group.add_option( + "-y", "--yes", + dest="answer_yes", + action="store_true", + default=False, + help=_("Answer yes to all questions.") + ) def parse_options(self, fatal=True): """ @@ -323,15 +388,18 @@ """ Run Forest, RUN! 
""" - - exitcode = 0 - if self.cli_args: if len(self.cli_args) >= 1: - if hasattr(self,"command_%s" % self.cli_args[0].replace('-','_')): - exec("self.command_%s(%r)" % (self.cli_args[0].replace('-','_'), self.cli_args[1:])) + if hasattr(self, "command_%s" % self.cli_args[0].replace('-', '_')): + exec( + "self.command_%s(%r)" % ( + self.cli_args[0].replace('-', '_'), + self.cli_args[1:] + ) + ) + else: - print >> sys.stderr, _("No command supplied") + print(_("No command supplied"), file=sys.stderr) def command_dump(self, *args, **kw): """ @@ -342,7 +410,7 @@ self.read_config() if not self.cfg_parser.has_section('kolab'): - print "No section found for kolab" + print("No section found for kolab", file=sys.stderr) sys.exit(1) # Get the sections, and then walk through the sections in a @@ -352,22 +420,22 @@ items.sort() for item in items: - mode = self.cfg_parser.get('kolab',item) - print "%s = %s" % (item,mode) + mode = self.cfg_parser.get('kolab', item) + print("%s = %s" % (item, mode)) if not self.cfg_parser.has_section(mode): - print "WARNING: No configuration section %s for item %s" % (mode,item) + print("WARNING: No configuration section %s for item %s" % (mode, item)) continue keys = self.cfg_parser.options(mode) keys.sort() if self.cfg_parser.has_option(mode, 'leave_this_one_to_me'): - print "Ignoring section %s" % (mode,) + print("Ignoring section %s" % (mode)) continue for key in keys: - print "%s_%s = %s" % (mode, key ,self.cfg_parser.get(mode,key)) + print("%s_%s = %s" % (mode, key, self.cfg_parser.get(mode, key))) def read_config(self, value=None): """ @@ -377,7 +445,7 @@ if not value: value = self.defaults.config_file - if hasattr(self, 'cli_keywords') and not self.cli_keywords == None: + if hasattr(self, 'cli_keywords') and self.cli_keywords is not None: value = self.cli_keywords.config_file self.cfg_parser = SafeConfigParser() @@ -397,7 +465,7 @@ exec("args = %r" % args) - print "%s/%s: %r" % (args[0],args[1],self.get(args[0], args[1])) + print("%s/%s: %r" % (args[0], args[1], self.get(args[0], args[1]))) # if len(args) == 3: # # Return non-zero if no match @@ -444,8 +512,9 @@ """ Create a logger instance using cli_options.debuglevel """ + global log - if not self.cli_keywords.debuglevel == None: + if self.cli_keywords.debuglevel is not None: loglevel = logging.DEBUG else: loglevel = logging.INFO @@ -454,18 +523,34 @@ self.debuglevel = self.cli_keywords.debuglevel # Initialize logger - log = pykolab.logger.Logger(loglevel=loglevel, debuglevel=self.cli_keywords.debuglevel, logfile=self.cli_keywords.logfile) + log = pykolab.logger.Logger( + loglevel=loglevel, + debuglevel=self.cli_keywords.debuglevel, + logfile=self.cli_keywords.logfile + ) def set_defaults_from_cli_options(self): for long_opt in self.cli_parser.__dict__['_long_opt'].keys(): if long_opt == "--help": continue - setattr(self.defaults,self.cli_parser._long_opt[long_opt].dest,self.cli_parser._long_opt[long_opt].default) + + setattr( + self.defaults, + self.cli_parser._long_opt[long_opt].dest, + self.cli_parser._long_opt[long_opt].default + ) # But, they should be available in our class as well for option in self.cli_parser.defaults.keys(): - log.debug(_("Setting %s to %r (from the default values for CLI options)") % (option, self.cli_parser.defaults[option]), level=8) - setattr(self,option,self.cli_parser.defaults[option]) + log.debug( + _("Setting %s to %r (from the default values for CLI options)") % ( + option, + self.cli_parser.defaults[option] + ), + level=8 + ) + + setattr(self, option, 
self.cli_parser.defaults[option]) def has_section(self, section): if not self.cfg_parser: @@ -479,7 +564,7 @@ return self.cfg_parser.has_option(section, option) - def get_list(self, section, key): + def get_list(self, section, key, default=None): """ Gets a comma and/or space separated list from the configuration file and returns a list. @@ -488,13 +573,14 @@ untrimmed_values = [] setting = self.get_raw(section, key) - if setting == None: - return [] + + if setting is None: + return default if default else [] raw_values = setting.split(',') - if raw_values == None: - return [] + if raw_values is None: + return default if default else [] for raw_value in raw_values: untrimmed_values.extend(raw_value.split(' ')) @@ -510,11 +596,11 @@ self.read_config() if self.cfg_parser.has_option(section, key): - return self.cfg_parser.get(section,key, 1) + return self.cfg_parser.get(section, key, 1) return default - def get(self, section, key, quiet=False): + def get(self, section, key, default=None, quiet=False): """ Get a configuration option from our store, the configuration file, or an external source if we have some sort of function for it. @@ -526,40 +612,56 @@ if not self.cfg_parser: self.read_config() - #log.debug(_("Obtaining value for section %r, key %r") % (section, key), level=8) + # log.debug(_("Obtaining value for section %r, key %r") % (section, key), level=8) if self.cfg_parser.has_option(section, key): try: return self.cfg_parser.get(section, key) - except: + except Exception: self.read_config() return self.cfg_parser.get(section, key) - if hasattr(self, "get_%s_%s" % (section,key)): + if hasattr(self, "get_%s_%s" % (section, key)): try: - exec("retval = self.get_%s_%s(quiet)" % (section,key)) - except Exception, e: - log.error(_("Could not execute configuration function: %s") % ("get_%s_%s(quiet=%r)" % (section,key,quiet))) - return None + exec("retval = self.get_%s_%s(quiet)" % (section, key)) + except Exception: + log.error( + _("Could not execute configuration function: %s") % ( + "get_%s_%s(quiet=%r)" % ( + section, + key, + quiet + ) + ) + ) + + return default return retval if quiet: return "" else: - log.warning(_("Option %s/%s does not exist in config file %s, pulling from defaults") % (section, key, self.config_file)) - if hasattr(self.defaults, "%s_%s" % (section,key)): - return getattr(self.defaults, "%s_%s" % (section,key)) + log.warning( + _("Option %s/%s does not exist in config file %s, pulling from defaults") % ( + section, + key, + self.config_file + ) + ) + + if hasattr(self.defaults, "%s_%s" % (section, key)): + return getattr(self.defaults, "%s_%s" % (section, key)) elif hasattr(self.defaults, "%s" % (section)): if key in getattr(self.defaults, "%s" % (section)): _dict = getattr(self.defaults, "%s" % (section)) return _dict[key] else: log.warning(_("Option does not exist in defaults.")) - return None + return default else: log.warning(_("Option does not exist in defaults.")) - return None + return default def check_setting_config_file(self, value): if os.path.isfile(value): @@ -576,7 +678,13 @@ def check_setting_debuglevel(self, value): if value < 0: - log.info(_("WARNING: A negative debug level value does not make this program be any more silent.")) + log.info( + _( + "WARNING: A negative debug level value does not " + + "make this program be any more silent." 
+ ) + ) + elif value == 0: return True elif value <= 9: @@ -595,7 +703,7 @@ else: try: os.remove("/var/run/saslauthd/mux") - except IOError, e: + except IOError: log.error(_("Cannot start SASL authentication daemon")) return False elif os.path.isfile("/var/run/sasl2/mux"): @@ -605,7 +713,7 @@ else: try: os.remove("/var/run/sasl2/mux") - except IOError, e: + except IOError: log.error(_("Cannot start SASL authentication daemon")) return False return True @@ -644,14 +752,18 @@ # Attempt to load the suite, # Get the suite's options, # Set them here. - if not hasattr(self,'test_suites'): + if not hasattr(self, 'test_suites'): self.test_suites = [] if "zpush" in value: selectively = False - for item in [ 'calendar', 'contacts', 'mail' ]: + for item in ['calendar', 'contacts', 'mail']: if self.cli_keywords.__dict__[item]: - log.debug(_("Found you specified a specific set of items to test: %s") % (item), level=8) + log.debug( + _("Found you specified a specific set of items to test: %s") % (item), + level=8 + ) + selectively = item if not selectively:
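Most of this conf/__init__.py hunk is mechanical (print_function, PEP 8 spacing, `is None` comparisons), but get(), get_raw() and get_list() also grow a default parameter. get_list() still splits an option on commas and/or spaces; a compact, self-contained equivalent of that behaviour:

def get_list(setting, default=None):
    # Comma- and/or space-separated option value -> list of trimmed values;
    # the new `default` is returned when the option is unset.
    if setting is None:
        return default if default else []

    values = []
    for raw_value in setting.split(','):
        values.extend(v for v in raw_value.split(' ') if v)

    return values


print(get_list('mail, alias  uid'))        # ['mail', 'alias', 'uid']
print(get_list(None, default=['mail']))    # ['mail']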
View file
pykolab-0.8.15.tar.gz/pykolab/conf/defaults.py -> pykolab-0.8.16.tar.gz/pykolab/conf/defaults.py
Changed
@@ -19,6 +19,7 @@ import logging + class Defaults(object): def __init__(self, plugins=None): self.loglevel = logging.CRITICAL @@ -33,10 +34,12 @@ self.mail_attributes = ['mail', 'alias'] self.mailserver_attribute = 'mailhost' - # when you want a new domain to be added in a short time, you should reduce this value to 10 seconds + # when you want a new domain to be added in a short time, you should reduce this value to + # 10 seconds self.kolab_domain_sync_interval = 600 self.kolab_default_locale = 'en_US' + self.ldap_timeout = 10 self.ldap_unique_attribute = 'nsuniqueid' - self.wallace_resource_calendar_expire_days = 100 \ No newline at end of file + self.wallace_resource_calendar_expire_days = 100
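The only functional change here is the new ldap_timeout default. Defaults attributes are named <section>_<key>, so this is what Conf.get('ldap', 'timeout') falls back to when kolab.conf does not set it; a quick illustration:

from pykolab.conf.defaults import Defaults

defaults = Defaults()

print(defaults.ldap_timeout)           # 10
print(defaults.kolab_default_locale)   # en_US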
View file
pykolab-0.8.15.tar.gz/pykolab/imap/__init__.py -> pykolab-0.8.16.tar.gz/pykolab/imap/__init__.py
Changed
@@ -54,20 +54,16 @@ if len(aci_subject.split('@')) > 1: lm_suffix = "@%s" % (aci_subject.split('@')[1]) - shared_folders = self.imap.lm( - "shared/*%s" % (lm_suffix) - ) + shared_folders = self.imap.lm("shared/*%s" % (lm_suffix)) - user_folders = self.imap.lm( - "user/*%s" % (lm_suffix) - ) + user_folders = self.imap.lm("user/*%s" % (lm_suffix)) log.debug( - _("Cleaning up ACL entries referring to identifier %s") % ( - aci_subject - ), - level=5 - ) + _("Cleaning up ACL entries referring to identifier %s") % ( + aci_subject + ), + level=5 + ) # For all folders (shared and user), ... folders = user_folders + shared_folders @@ -128,12 +124,12 @@ # deployment. backend = conf.get('kolab', 'imap_backend') - if not domain == None: + if domain is not None: self.domain = domain if conf.has_section(domain) and conf.has_option(domain, 'imap_backend'): backend = conf.get(domain, 'imap_backend') - if uri == None: + if uri is None: if conf.has_section(domain) and conf.has_option(domain, 'imap_uri'): uri = conf.get(domain, 'imap_uri') else: @@ -143,7 +139,7 @@ hostname = None port = None - if uri == None: + if uri is None: uri = conf.get(backend, 'uri') result = urlparse(uri) @@ -162,13 +158,13 @@ scheme = uri.split(':')[0] (hostname, port) = uri.split('/')[2].split(':') - if not server == None: + if server is not None: hostname = server - if scheme == None or scheme == "": + if scheme is None or scheme == "": scheme = 'imaps' - if port == None: + if port is None: if scheme == "imaps": port = 993 elif scheme == "imap": @@ -182,10 +178,10 @@ admin_login = conf.get(backend, 'admin_login') admin_password = conf.get(backend, 'admin_password') - if admin_password == None or admin_password == '': + if admin_password is None or admin_password == '': log.error(_("No administrator password is available.")) - if not self._imap.has_key(hostname): + if hostname not in self._imap: if backend == 'cyrus-imap': import cyrus self._imap[hostname] = cyrus.Cyrus(uri) @@ -216,19 +212,25 @@ else: if not login: self.disconnect(hostname) - self.connect(uri=uri,login=False) - elif login and not hasattr(self._imap[hostname],'logged_in'): + self.connect(uri=uri, login=False) + elif login and not hasattr(self._imap[hostname], 'logged_in'): self.disconnect(hostname) self.connect(uri=uri) else: try: if hasattr(self._imap[hostname], 'm'): self._imap[hostname].m.noop() - elif hasattr(self._imap[hostname], 'noop') and callable(self._imap[hostname].noop): + elif hasattr(self._imap[hostname], 'noop') \ + and callable(self._imap[hostname].noop): + self._imap[hostname].noop() - log.debug(_("Reusing existing IMAP server connection to %s") % (hostname), level=8) - except: + log.debug( + _("Reusing existing IMAP server connection to %s") % (hostname), + level=8 + ) + + except Exception: log.debug(_("Reconnecting to IMAP server %s") % (hostname), level=8) self.disconnect(hostname) self.connect() @@ -243,7 +245,7 @@ self._set_socket_keepalive(self.imap.sock) def disconnect(self, server=None): - if server == None: + if server is None: # No server specified, but make sure self.imap is None anyways if hasattr(self, 'imap'): del self.imap @@ -253,33 +255,30 @@ del self._imap[key] else: - if self._imap.has_key(server): + if server in self._imap: del self._imap[server] else: - log.warning(_("Called imap.disconnect() on a server that we had no connection to.")) + log.warning( + _("Called imap.disconnect() on a server that we had no connection to.") + ) def create_folder(self, folder_path, server=None, partition=None): folder_path = 
self.folder_utf7(folder_path) - if not server == None: + if server is not None: self.connect(server=server) try: self._imap[server].cm(folder_path, partition=partition) return True - except: - log.error( - _("Could not create folder %r on server %r") % ( - folder_path, - server - ) - ) + except Exception: + log.error(_("Could not create folder %r on server %r") % (folder_path, server)) else: try: self.imap.cm(folder_path, partition=partition) return True - except: + except Exception: log.error(_("Could not create folder %r") % (folder_path)) return False @@ -290,9 +289,9 @@ if hasattr(self.imap.m, name): return getattr(self.imap.m, name) else: - raise AttributeError, _("%r has no attribute %s") % (self,name) + raise AttributeError(_("%r has no attribute %s") % (self, name)) else: - raise AttributeError, _("%r has no attribute %s") % (self,name) + raise AttributeError(_("%r has no attribute %s") % (self, name)) def folder_utf7(self, folder): from pykolab import imap_utf7 @@ -313,13 +312,13 @@ _metadata = self.imap.getannotation(self.folder_utf7(folder), '*') - for (k,v) in _metadata.items(): + for (k, v) in _metadata.items(): metadata[self.folder_utf8(k)] = v return metadata def get_separator(self): - if not hasattr(self, 'imap') or self.imap == None: + if not hasattr(self, 'imap') or self.imap is None: self.connect() if hasattr(self.imap, 'separator'): @@ -357,13 +356,13 @@ if len(_namespaces) >= 3: _shared = [] - _shared.append(' '.join(_namespaces[2].replace('((','').replace('))','').split()[:-1]).replace('"', '')) + _shared.append(' '.join(_namespaces[2].replace('((', '').replace('))', '').split()[:-1]).replace('"', '')) if len(_namespaces) >= 2: - _other_users = ' '.join(_namespaces[1].replace('((','').replace('))','').split()[:-1]).replace('"', '') + _other_users = ' '.join(_namespaces[1].replace('((', '').replace('))', '').split()[:-1]).replace('"', '') if len(_namespaces) >= 1: - _personal = _namespaces[0].replace('((','').replace('))','').split()[0].replace('"', '') + _personal = _namespaces[0].replace('((', '').replace('))', '').split()[0].replace('"', '') return (_personal, _other_users, _shared) @@ -385,7 +384,7 @@ 'write': 'lrswite', } - if short_rights.has_key(acl): + if acl in short_rights: acl = short_rights[acl] else: for char in acl: @@ -428,7 +427,7 @@ try: self.imap.sam(self.folder_utf7(folder), identifier, acl) - except Exception, errmsg: + except Exception as errmsg: log.error( _("Could not set ACL for %s on folder %s: %r") % ( identifier, @@ -544,10 +543,10 @@ if not hasattr(self, 'domain'): self.domain = None - if self.domain == None and len(mailbox_base_name.split('@')) > 1: + if self.domain is None and len(mailbox_base_name.split('@')) > 1: self.domain = mailbox_base_name.split('@')[1] - if not self.domain == None: + if not self.domain is None: if conf.has_option(self.domain, "autocreate_folders"): _additional_folders = conf.get_raw( self.domain, @@ -564,7 +563,7 @@ auth.disconnect() if len(domains.keys()) > 0: - if domains.has_key(self.domain): + if self.domain in domains: primary = domains[self.domain] if conf.has_option(primary, "autocreate_folders"): @@ -573,7 +572,7 @@ "autocreate_folders" ) - if _additional_folders == None: + if _additional_folders is None: if conf.has_option('kolab', "autocreate_folders"): _additional_folders = conf.get_raw( 'kolab', @@ -588,13 +587,13 @@ } ) - if not additional_folders == None: + if additional_folders is not None: self.user_mailbox_create_additional_folders( mailbox_base_name, additional_folders ) - if not self.domain == 
None: + if not self.domain is None: if conf.has_option(self.domain, "sieve_mgmt"): sieve_mgmt_enabled = conf.get(self.domain, 'sieve_mgmt') if utils.true_or_false(sieve_mgmt_enabled): @@ -634,7 +633,7 @@ self.login_plain(admin_login, admin_password, user) (personal, other, shared) = self.namespaces() success = True - except Exception, errmsg: + except Exception as errmsg: if time.time() - last_log > 5 and self.imap_murder(): log.debug(_("Waiting for the Cyrus murder to settle... %r") % (errmsg)) last_log = time.time() @@ -661,7 +660,7 @@ log.warning(_("Failed to create folder: %s") % (folder_name)) continue - if additional_folders[additional_folder].has_key("annotations"): + if "annotations" in additional_folders[additional_folder]: for annotation in additional_folders[additional_folder]["annotations"].keys(): self.set_metadata( folder_name, @@ -669,7 +668,7 @@ "%s" % (additional_folders[additional_folder]["annotations"][annotation]) ) - if additional_folders[additional_folder].has_key("acls"): + if "acls" in additional_folders[additional_folder]: for acl in additional_folders[additional_folder]["acls"].keys(): self.set_acl( folder_name, @@ -686,7 +685,7 @@ domain = None domain_suffix = "" - if not domain == None: + if domain is not None: if conf.has_section(domain) and conf.has_option(domain, 'imap_backend'): backend = conf.get(domain, 'imap_backend') @@ -702,10 +701,10 @@ # Subscribe only to personal folders (personal, other, shared) = self.namespaces() - if not other == None: + if other is not None: _tests.append(other) - if not shared == None: + if shared is not None: for _shared in shared: _tests.append(_shared) @@ -729,7 +728,7 @@ log.debug(_("Subscribing %s to folder %s") % (user, _folder), level=8) try: self.subscribe(_folder) - except Exception, errmsg: + except Exception as errmsg: log.error(_("Subscribing %s to folder %s failed: %r") % (user, _folder, errmsg)) self.logout() @@ -749,16 +748,16 @@ domain_suffix ) - if additional_folders[additional_folder].has_key("quota"): + if "quota" in additional_folders[additional_folder]: try: self.imap.sq( folder_name, additional_folders[additional_folder]['quota'] ) - except Exception, errmsg: + except Exception as errmsg: log.error(_("Could not set quota on %s") % (additional_folder)) - if additional_folders[additional_folder].has_key("partition"): + if "partition" in additional_folders[additional_folder]: partition = additional_folders[additional_folder]["partition"] try: self.imap._rename(folder_name, folder_name, partition) @@ -815,7 +814,7 @@ """ self.connect() - folder = "user%s%s" %(self.get_separator(),mailbox_base_name) + folder = "user%s%s" %(self.get_separator(), mailbox_base_name) self.delete_mailfolder(folder) self.cleanup_acls(mailbox_base_name) @@ -837,23 +836,23 @@ def user_mailbox_rename(self, old_name, new_name, partition=None): self.connect() - old_name = "user%s%s" % (self.get_separator(),old_name) - new_name = "user%s%s" % (self.get_separator(),new_name) + old_name = "user%s%s" % (self.get_separator(), old_name) + new_name = "user%s%s" % (self.get_separator(), new_name) - if old_name == new_name and partition == None: + if old_name == new_name and partition is None: return if not self.has_folder(old_name): log.error(_("INBOX folder to rename (%s) does not exist") % (old_name)) - if not self.has_folder(new_name) or not partition == None: - log.info(_("Renaming INBOX from %s to %s") % (old_name,new_name)) + if not self.has_folder(new_name) or not partition is None: + log.info(_("Renaming INBOX from %s to %s") % (old_name, 
new_name)) try: - self.imap.rename(old_name,new_name,partition) + self.imap.rename(old_name, new_name, partition) except: - log.error(_("Could not rename INBOX folder %s to %s") % (old_name,new_name)) + log.error(_("Could not rename INBOX folder %s to %s") % (old_name, new_name)) else: - log.warning(_("Moving INBOX folder %s won't succeed as target folder %s already exists") % (old_name,new_name)) + log.warning(_("Moving INBOX folder %s won't succeed as target folder %s already exists") % (old_name, new_name)) def user_mailbox_server(self, mailbox): server = self.imap.find_mailfolder_server(mailbox.lower()).lower() @@ -865,7 +864,7 @@ Check if the environment has a folder named folder. """ folders = self.imap.lm(self.folder_utf7(folder)) - log.debug(_("Looking for folder '%s', we found folders: %r") % (folder,[self.folder_utf8(x) for x in folders]), level=8) + log.debug(_("Looking for folder '%s', we found folders: %r") % (folder, [self.folder_utf8(x) for x in folders]), level=8) # Greater then one, this folder may have subfolders. if len(folders) > 0: return True @@ -924,7 +923,7 @@ if epoch > (int)(time.time()): log.debug( _("Setting ACL rights %s for subject %s on folder " + \ - "%s") % (rights,subject,folder), level=8) + "%s") % (rights, subject, folder), level=8) self.set_acl( folder, @@ -935,7 +934,7 @@ else: log.debug( _("Removing ACL rights %s for subject %s on folder " + \ - "%s") % (rights,subject,folder), level=8) + "%s") % (rights, subject, folder), level=8) self.set_acl( folder, @@ -959,7 +958,7 @@ def move_user_folders(self, users=[], domain=None): for user in users: if type(user) == dict: - if user.has_key('old_mail'): + if 'old_mail' in user: inbox = "user/%s" % (user['mail']) old_inbox = "user/%s" % (user['old_mail']) @@ -967,11 +966,11 @@ log.debug(_("Found old INBOX folder %s") % (old_inbox), level=8) if not self.has_folder(inbox): - log.info(_("Renaming INBOX from %s to %s") % (old_inbox,inbox)) - self.imap.rename(old_inbox,inbox) + log.info(_("Renaming INBOX from %s to %s") % (old_inbox, inbox)) + self.imap.rename(old_inbox, inbox) self.inbox_folders.append(inbox) else: - log.warning(_("Moving INBOX folder %s won't succeed as target folder %s already exists") % (old_inbox,inbox)) + log.warning(_("Moving INBOX folder %s won't succeed as target folder %s already exists") % (old_inbox, inbox)) else: log.debug(_("Did not find old folder user/%s to rename") % (user['old_mail']), level=8) else: @@ -1004,7 +1003,7 @@ default_quota = auth.domain_default_quota(primary_domain) - if default_quota == "" or default_quota == None: + if default_quota == "" or default_quota is None: default_quota = 0 if len(users) == 0: @@ -1014,19 +1013,19 @@ quota = None if type(user) == dict: - if user.has_key(_quota_attr): + if _quota_attr in user: if type(user[_quota_attr]) == list: quota = user[_quota_attr].pop(0) elif type(user[_quota_attr]) == str: quota = user[_quota_attr] else: _quota = auth.get_user_attribute(primary_domain, user, _quota_attr) - if _quota == None: + if _quota is None: quota = 0 else: quota = _quota - if not user.has_key(_inbox_folder_attr): + if _inbox_folder_attr not in user: continue else: if type(user[_inbox_folder_attr]) == list: @@ -1040,7 +1039,7 @@ folder = folder.lower() try: - (used,current_quota) = self.imap.lq(folder) + (used, current_quota) = self.imap.lq(folder) except: # TODO: Go in fact correct the quota. 
log.warning(_("Cannot get current IMAP quota for folder %s") % (folder)) @@ -1058,11 +1057,11 @@ log.debug(_("Quota for %s currently is %s") % (folder, current_quota), level=7) - if new_quota == None: + if new_quota is None: continue if not int(new_quota) == int(quota): - log.info(_("Adjusting authentication database quota for folder %s to %d") % (folder,int(new_quota))) + log.info(_("Adjusting authentication database quota for folder %s to %d") % (folder, int(new_quota))) quota = int(new_quota) auth.set_user_attribute(primary_domain, user, _quota_attr, new_quota) @@ -1086,7 +1085,7 @@ mailhost = None if type(user) == dict: - if user.has_key(_mailserver_attr): + if _mailserver_attr in user: if type(user[_mailserver_attr]) == list: _mailserver = user[_mailserver_attr].pop(0) elif type(user[_mailserver_attr]) == str: @@ -1094,7 +1093,7 @@ else: _mailserver = auth.get_user_attribute(primary_domain, user, _mailserver_attr) - if not user.has_key(_inbox_folder_attr): + if _inbox_folder_attr not in user: continue else: if type(user[_inbox_folder_attr]) == list: @@ -1110,7 +1109,7 @@ _current_mailserver = self.imap.find_mailfolder_server(folder) - if not _mailserver == None: + if _mailserver is not None: # TODO: if not _current_mailserver == _mailserver: self.imap._xfer(folder, _current_mailserver, _mailserver) @@ -1138,7 +1137,7 @@ primary_domain, secondary_domains """ - if inbox_folders == None: + if inbox_folders is None: inbox_folders = [] folders = self.list_user_folders() @@ -1165,7 +1164,7 @@ mbox_parts = self.parse_mailfolder(mailfolder_path) - if mbox_parts == None: + if mbox_parts is None: # We got user identifier only log.error(_("Please don't give us just a user identifier")) return @@ -1204,7 +1203,7 @@ acceptable_domain_name_res = [] - if not primary_domain == None: + if primary_domain is not None: for domain in [ primary_domain ] + secondary_domains: acceptable_domain_name_res.append(domain_re % (domain)) @@ -1221,16 +1220,16 @@ #print "Acceptable indeed" #acceptable = True #if not acceptable: - #print "%s is not acceptable against %s yet using %s" % (folder.split('@')[1],folder,domain_name_re) + #print "%s is not acceptable against %s yet using %s" % (folder.split('@')[1], folder, domain_name_re) #if acceptable: - #folder_name = "%s@%s" % (folder.split(self.separator)[1].split('@')[0],folder.split('@')[1]) + #folder_name = "%s@%s" % (folder.split(self.separator)[1].split('@')[0], folder.split('@')[1]) - folder_name = "%s@%s" % (folder.split(self.get_separator())[1].split('@')[0],folder.split('@')[1]) + folder_name = "%s@%s" % (folder.split(self.get_separator())[1].split('@')[0], folder.split('@')[1]) else: folder_name = "%s" % (folder.split(self.get_separator())[1]) - if not folder_name == None: + if folder_name is not None: if not folder_name in folders: folders.append(folder_name)
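A large part of the imap/__init__.py hunk is the same Python 3 groundwork seen elsewhere: `except ... as`, `has_key()` replaced by `in`, and statement-style raises turned into calls. The __getattr__ delegation to the underlying connection is a good example; the class below is a simplified stand-in for pykolab.imap.IMAP (the names are illustrative, the real code delegates to self.imap.m):

class IMAPProxy:
    def __init__(self, connection):
        self._connection = connection

    def __getattr__(self, name):
        # Unknown attributes are looked up on the wrapped low-level
        # connection; the raise is now a call, valid on Python 2 and 3.
        if hasattr(self._connection, name):
            return getattr(self._connection, name)

        raise AttributeError("%r has no attribute %s" % (self, name))


class FakeConnection:
    def noop(self):
        return 'OK'


proxy = IMAPProxy(FakeConnection())
print(proxy.noop())    # OK, delegated to the wrapped connection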
View file
pykolab-0.8.15.tar.gz/pykolab/logger.py -> pykolab-0.8.16.tar.gz/pykolab/logger.py
Changed
@@ -16,6 +16,7 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # +from __future__ import print_function import grp import logging @@ -23,11 +24,9 @@ import os import pwd import sys -import time -from pykolab.translate import _ -class StderrToLogger(object): +class StderrToLogger: """ Fake file-like stream object that redirects writes to a logger instance. """ @@ -36,7 +35,7 @@ self.log_level = log_level self.linebuf = '' self.skip_next = False - + def write(self, buf): # ugly patch to make smtplib and smtpd debug logging records appear on one line in log file # smtplib uses "print>>stderr, var, var" statements for debug logging. These @@ -59,10 +58,11 @@ else: self.logger.log(self.log_level, '%s %s', self.linebuf, line.rstrip()[:150]) self.linebuf = '' - - def flush(self): + + def flush(self): pass + class LoggerAdapter(logging.LoggerAdapter): """ Custom LoggingAdapter to log Wallace mail message Queue ID @@ -71,6 +71,7 @@ def process(self, msg, kwargs): return '%s %s' % (self.extra['qid'], msg), kwargs + class Logger(logging.Logger): """ The PyKolab version of a logger. @@ -88,31 +89,31 @@ for arg in sys.argv: if debuglevel == -1: try: - debuglevel = int(arg) - except ValueError, errmsg: + debuglevel = (int)(arg) + except ValueError: continue loglevel = logging.DEBUG break - if '-d' == arg: + if arg == '-d': debuglevel = -1 continue - if '-l' == arg: + if arg == '-l': loglevel = -1 continue - if '--fork' == arg: + if arg == '--fork': fork = True if loglevel == -1: - if hasattr(logging,arg.upper()): - loglevel = getattr(logging,arg.upper()) + if hasattr(logging, arg.upper()): + loglevel = getattr(logging, arg.upper()) else: loglevel = logging.DEBUG - if '-u' == arg or '--user' == arg: + if arg in ['-u', '--user']: process_username = -1 continue @@ -122,7 +123,7 @@ if process_username == -1: process_username = arg - if '-g' == arg or '--group' == arg: + if arg in ['-g', '--group']: process_groupname = -1 continue @@ -132,8 +133,11 @@ if process_groupname == -1: process_groupname = arg + # pylint: disable=too-many-branches + # pylint: disable=too-many-locals + # pylint: disable=too-many-statements def __init__(self, *args, **kw): - if kw.has_key('name'): + if 'name' in kw: name = kw['name'] elif len(args) == 1: name = args[0] @@ -142,7 +146,9 @@ logging.Logger.__init__(self, name) - plaintextformatter = logging.Formatter("%(asctime)s %(name)s %(levelname)s [%(process)d] %(message)s") + plaintextformatter = logging.Formatter( + "%(asctime)s %(name)s %(levelname)s [%(process)d] %(message)s" + ) if not self.fork: self.console_stdout = logging.StreamHandler(sys.stdout) @@ -150,7 +156,7 @@ self.addHandler(self.console_stdout) - if kw.has_key('logfile'): + if 'logfile' in kw: self.logfile = kw['logfile'] else: self.logfile = '/var/log/kolab/pykolab.log' @@ -161,9 +167,9 @@ # Make sure (read: attempt to change) the permissions try: try: - (ruid, euid, suid) = os.getresuid() - (rgid, egid, sgid) = os.getresgid() - except AttributeError, errmsg: + (ruid, _, _) = os.getresuid() + (rgid, _, _) = os.getresgid() + except AttributeError: ruid = os.getuid() rgid = os.getgid() @@ -173,48 +179,52 @@ # Get group entry details try: ( - group_name, - group_password, - group_gid, - group_members - ) = grp.getgrnam(self.process_groupname) + _, + _, + group_gid, + _ + ) = grp.getgrnam(self.process_groupname) - except KeyError, errmsg: - group_name = False + except KeyError: + group_gid = False if ruid == 0: # Means we 
haven't switched yet. try: ( - user_name, - user_password, - user_uid, - user_gid, - user_gecos, - user_homedir, - user_shell - ) = pwd.getpwnam(self.process_username) - - except KeyError, errmsg: - user_name = False + _, + _, + user_uid, + _, + _, + _, + _ + ) = pwd.getpwnam(self.process_username) + + except KeyError: + user_uid = False if os.path.isfile(self.logfile): try: - if not user_uid == 0 or group_gid == 0: + if user_uid > 0 or group_gid > 0: os.chown( - self.logfile, - user_uid, - group_gid - ) - os.chmod(self.logfile, 0660) - - except Exception, errmsg: - self.error(_("Could not change permissions on %s: %r") % (self.logfile, errmsg)) + self.logfile, + user_uid, + group_gid + ) + + os.chmod(self.logfile, 660) + + except Exception as errmsg: + self.error( + _("Could not change permissions on %s: %r") % (self.logfile, errmsg) + ) + if self.debuglevel > 8: import traceback traceback.print_exc() - except Exception, errmsg: + except Exception as errmsg: if os.path.isfile(self.logfile): self.error(_("Could not change permissions on %s: %r") % (self.logfile, errmsg)) if self.debuglevel > 8: @@ -223,7 +233,7 @@ # Make sure the log file exists try: - fhandle = file(self.logfile, 'a') + fhandle = open(self.logfile, 'a') try: os.utime(self.logfile, None) finally: @@ -232,16 +242,16 @@ try: filelog_handler = logging.FileHandler(filename=self.logfile) filelog_handler.setFormatter(plaintextformatter) - except IOError, e: - print >> sys.stderr, _("Cannot log to file %s: %s") % (self.logfile, e) + except IOError as errmsg: + print(_("Cannot log to file %s: %s") % (self.logfile, errmsg), file=sys.stderr) - if not len(self.handlers) > 1: + if len(self.handlers) <= 1: try: self.addHandler(filelog_handler) - except: + except Exception: pass - except IOError, errmsg: + except IOError: pass def remove_stdout_handler(self): @@ -249,14 +259,16 @@ self.console_stdout.close() self.removeHandler(self.console_stdout) + # pylint: disable=arguments-differ + # pylint: disable=keyword-arg-before-vararg def debug(self, msg, level=1, *args, **kw): self.setLevel(self.loglevel) # Work around other applications not using various levels of debugging - if not self.name.startswith('pykolab') and not self.debuglevel == 9: + if not self.name.startswith('pykolab') and self.debuglevel != 9: return if level <= self.debuglevel: - # TODO: Not the way it's supposed to work! self.log(logging.DEBUG, msg) + logging.setLoggerClass(Logger)
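logger.py keeps its two helpers, StderrToLogger (a file-like object that feeds stray stderr output, such as smtplib debug prints, into the log) and LoggerAdapter (which prefixes every record with the Wallace message queue ID). A minimal equivalent of the adapter, with an invented queue ID for illustration:

import logging


class QueueIDAdapter(logging.LoggerAdapter):
    # Same idea as pykolab.logger.LoggerAdapter: prepend the queue ID that
    # was handed in via the `extra` dict to every message.
    def process(self, msg, kwargs):
        return '%s %s' % (self.extra['qid'], msg), kwargs


logging.basicConfig(format='%(name)s %(levelname)s %(message)s', level=logging.INFO)
log = logging.getLogger('pykolab.wallace')

adapter = QueueIDAdapter(log, {'qid': 'A1B2C3D4E5'})    # example queue ID
adapter.info('message accepted')
# -> pykolab.wallace INFO A1B2C3D4E5 message accepted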
View file
pykolab-0.8.15.tar.gz/pykolab/plugins/__init__.py -> pykolab-0.8.16.tar.gz/pykolab/plugins/__init__.py
Changed
@@ -30,6 +30,7 @@ log = pykolab.getLogger('pykolab.plugins') conf = pykolab.getConf() + class KolabPlugins(object): """ Detects, loads and interfaces with plugins for different @@ -50,7 +51,7 @@ if os.path.isdir(plugin_path): for plugin in os.listdir(plugin_path): - if os.path.isdir('%s/%s/' % (plugin_path,plugin,)): + if os.path.isdir('%s/%s/' % (plugin_path, plugin, )): self.plugins[plugin] = False self.check_plugins() @@ -67,11 +68,11 @@ self.plugins[plugin] = True self.load_plugins(plugins=[plugin]) except ImportError, e: - log.error(_("ImportError for plugin %s: %s") % (plugin,e)) + log.error(_("ImportError for plugin %s: %s") % (plugin, e)) traceback.print_exc() self.plugins[plugin] = False except RuntimeError, e: - log.error( _("RuntimeError for plugin %s: %s") % (plugin,e)) + log.error( _("RuntimeError for plugin %s: %s") % (plugin, e)) traceback.print_exc() self.plugins[plugin] = False except Exception, e: @@ -91,7 +92,7 @@ for plugin in plugins: if self.plugins[plugin]: try: - exec("self.%s = %s.Kolab%s()" % (plugin,plugin,plugin.capitalize())) + exec("self.%s = %s.Kolab%s()" % (plugin, plugin, plugin.capitalize())) except: # TODO: A little better verbosity please! traceback.print_exc() @@ -106,16 +107,16 @@ for plugin in plugins: if not self.plugins[plugin]: continue - if not hasattr(self,plugin): + if not hasattr(self, plugin): continue - if hasattr(getattr(self,plugin),"set_defaults"): + if hasattr(getattr(self, plugin), "set_defaults"): try: - getattr(self,plugin).set_defaults(defaults) + getattr(self, plugin).set_defaults(defaults) except TypeError, e: - log.error(_("Cannot set defaults for plugin %s: %s") % (plugin,e)) + log.error(_("Cannot set defaults for plugin %s: %s") % (plugin, e)) except RuntimeError, e: - log.error(_("Cannot set defaults for plugin %s: %s") % (plugin,e)) + log.error(_("Cannot set defaults for plugin %s: %s") % (plugin, e)) except: log.error(_("Cannot set defaults for plugin %s: Unknown Error") % (plugin)) @@ -132,14 +133,14 @@ for plugin in plugins: if not self.plugins[plugin]: continue - if not hasattr(self,plugin): + if not hasattr(self, plugin): continue - if hasattr(getattr(self,plugin),"set_runtime"): + if hasattr(getattr(self, plugin), "set_runtime"): try: - getattr(self,plugin).set_runtime(runtime) + getattr(self, plugin).set_runtime(runtime) except RuntimeError, e: - log.error(_("Cannot set runtime for plugin %s: %s") % (plugin,e)) + log.error(_("Cannot set runtime for plugin %s: %s") % (plugin, e)) else: log.debug(_("Not setting runtime for plugin %s: No function 'set_runtime()'") % (plugin), level=5) @@ -153,16 +154,16 @@ for plugin in plugins: if not self.plugins[plugin]: continue - if not hasattr(self,plugin): + if not hasattr(self, plugin): continue - if hasattr(getattr(self,plugin),"add_options"): + if hasattr(getattr(self, plugin), "add_options"): try: exec("self.%s.add_options(parser)" % plugin) except RuntimeError, e: - log.error(_("Cannot add options for plugin %s: %s") % (plugin,e)) + log.error(_("Cannot add options for plugin %s: %s") % (plugin, e)) except TypeError, e: - log.error(_("Cannot add options for plugin %s: %s") % (plugin,e)) + log.error(_("Cannot add options for plugin %s: %s") % (plugin, e)) else: log.debug(_("Not adding options for plugin %s: No function 'add_options()'") % plugin, level=5) @@ -177,14 +178,14 @@ for plugin in plugins: if not self.plugins[plugin]: continue - if not hasattr(self,plugin): + if not hasattr(self, plugin): continue - if hasattr(getattr(self,plugin),"check_options"): + if 
hasattr(getattr(self, plugin), "check_options"): try: exec("self.%s.check_options()" % plugin) except AttributeError, e: - log.error(_("Cannot check options for plugin %s: %s") % (plugin,e)) + log.error(_("Cannot check options for plugin %s: %s") % (plugin, e)) else: log.debug(_("Not checking options for plugin %s: No function 'check_options()'") % (plugin), level=5) @@ -199,11 +200,11 @@ for plugin in plugins: if not self.plugins[plugin]: continue - if not hasattr(self,plugin): + if not hasattr(self, plugin): continue - if hasattr(getattr(self,plugin),"%s_%s" % (func,option)): - exec("retval = getattr(self,plugin).%s_%s(val)" % (func,option)) + if hasattr(getattr(self, plugin), "%s_%s" % (func, option)): + exec("retval = getattr(self, plugin).%s_%s(val)" % (func, option)) return retval return False @@ -219,23 +220,34 @@ for plugin in plugins: if not self.plugins[plugin]: continue - if not hasattr(self,plugin): + if not hasattr(self, plugin): continue - if hasattr(getattr(self,plugin),hook): + if hasattr(getattr(self, plugin), hook): try: - log.debug(_("Executing hook %s for plugin %s") % (hook,plugin), level=8) - #print "retval = self.%s.%s(%r, %r)" % (plugin,hook, args, kw) - exec("retval = self.%s.%s(*args, **kw)" % (plugin,hook)) - except TypeError, e: - log.error(_("Cannot execute hook %s for plugin %s: %s") % (hook,plugin,e)) - except AttributeError, e: - log.error(_("Cannot execute hook %s for plugin %s: %s") % (hook,plugin,e)) + log.debug(_("Executing hook %s for plugin %s") % (hook, plugin), level=8) + func = getattr(getattr(self, plugin), hook) + retval = func(*args, **kw) + except TypeError as errmsg: + log.error( + _("Cannot execute hook %s for plugin %s: %s") % (hook, plugin, errmsg) + ) + + log.error(traceback.format_exc()) + except AttributeError as errmsg: + log.error( + _("Cannot execute hook %s for plugin %s: %s") % (hook, plugin, errmsg) + ) + + log.error(traceback.format_exc()) return retval def return_true_boolean_from_plugins(self, bool, plugins=[]): - """Given the name of a boolean, walks all specified plugins, or all available plugins, and returns True if a plugin has it set to true""" + """ + Given the name of a boolean, walks all specified plugins, or all available plugins, and + returns True if a plugin has it set to true + """ if len(plugins) < 1: plugins = self.plugins.keys() @@ -244,12 +256,12 @@ for plugin in plugins: if not self.plugins[plugin]: continue - if not hasattr(self,plugin): + if not hasattr(self, plugin): continue - if hasattr(getattr(self,plugin),bool): + if hasattr(getattr(self, plugin), bool): try: - exec("boolval = self.%s.%s" % (plugin,bool)) + exec("boolval = self.%s.%s" % (plugin, bool)) except AttributeError, e: pass else:
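The most interesting change in plugins/__init__.py is exec_hook(): the exec()-built call is replaced by a getattr() lookup plus a direct call, with the traceback logged on failure. A self-contained sketch of that dispatch pattern (the plugin class and hook name below are made up for the example):

import logging
import traceback

log = logging.getLogger('pykolab.plugins')


def exec_hook(plugin_object, hook, *args, **kw):
    # getattr()-based dispatch, as now used by KolabPlugins.exec_hook().
    if not hasattr(plugin_object, hook):
        return None

    try:
        func = getattr(plugin_object, hook)
        return func(*args, **kw)
    except (TypeError, AttributeError) as errmsg:
        log.error("Cannot execute hook %s: %s", hook, errmsg)
        log.error(traceback.format_exc())
        return None


class ExamplePlugin:
    def example_hook(self, **kw):
        return kw.get('entry')


print(exec_hook(ExamplePlugin(), 'example_hook', entry={'mail': 'user@example.org'}))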
View file
pykolab-0.8.15.tar.gz/pykolab/setup/setup_imap.py -> pykolab-0.8.16.tar.gz/pykolab/setup/setup_imap.py
Changed
@@ -49,6 +49,17 @@ Apply the necessary settings to /etc/imapd.conf """ + configdirectory = "/var/lib/imap/" + partition_default = "/var/spool/imap/" + sievedir = "/var/lib/imap/sieve/" + + if os.path.isdir("/var/lib/cyrus/"): + configdirectory = "/var/lib/cyrus/" + sievedir = "/var/lib/cyrus/sieve/" + + if os.path.isdir("/var/spool/cyrus/mail/"): + partition_default = "/var/spool/cyrus/mail/" + imapd_settings = { "ldap_servers": conf.get('ldap', 'ldap_uri'), "ldap_base": conf.get('ldap', 'base_dn'), @@ -64,6 +75,9 @@ "ldap_member_attribute": "nsrole", "admins": conf.get('cyrus-imap', 'admin_login'), "postuser": "shared", + "configdirectory": configdirectory, + "partition_default": partition_default, + "sievedir": sievedir } template_file = None
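The new block picks the Cyrus IMAP paths according to the distribution layout before they are written into imapd.conf. A rough standalone sketch of the same detection, assuming the stock Red Hat locations as the fallback:

import os


def detect_cyrus_paths():
    # Red Hat / CentOS defaults.
    configdirectory = "/var/lib/imap/"
    partition_default = "/var/spool/imap/"
    sievedir = "/var/lib/imap/sieve/"

    # Debian-style Cyrus layout.
    if os.path.isdir("/var/lib/cyrus/"):
        configdirectory = "/var/lib/cyrus/"
        sievedir = "/var/lib/cyrus/sieve/"

    if os.path.isdir("/var/spool/cyrus/mail/"):
        partition_default = "/var/spool/cyrus/mail/"

    return (configdirectory, partition_default, sievedir)


print(detect_cyrus_paths())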
View file
pykolab-0.8.15.tar.gz/pykolab/setup/setup_mysql.py -> pykolab-0.8.16.tar.gz/pykolab/setup/setup_mysql.py
Changed
@@ -49,6 +49,14 @@ ) mysql_group.add_option( + "--mysqlhost", + dest="mysqlhost", + action="store", + default='127.0.0.1', + help=_("The MySQL host address.") + ) + + mysql_group.add_option( "--mysqlrootpw", dest="mysqlrootpw", action="store", @@ -70,45 +78,46 @@ ] # on CentOS7, there is MariaDB instead of MySQL - mysqlservice = 'mysqld.service' - if os.path.isfile('/usr/lib/systemd/system/mariadb.service'): - mysqlservice = 'mariadb.service' - elif os.path.isfile('/usr/lib/systemd/system/mysql.service'): - mysqlservice = 'mysql.service' - if not os.path.isfile('/usr/lib/systemd/system/' + mysqlservice): - # on Debian Jessie, systemctl restart mysql - mysqlservice = 'mysql' - - if os.path.isfile('/bin/systemctl'): - subprocess.call(['/bin/systemctl', 'restart', mysqlservice]) - elif os.path.isfile('/sbin/service'): - subprocess.call(['/sbin/service', 'mysqld', 'restart']) - elif os.path.isfile('/usr/sbin/service'): - subprocess.call(['/usr/sbin/service', 'mysql', 'restart']) - else: - log.error(_("Could not start the MySQL database service.")) - - if os.path.isfile('/bin/systemctl'): - subprocess.call(['/bin/systemctl', 'enable', mysqlservice]) - elif os.path.isfile('/sbin/chkconfig'): - subprocess.call(['/sbin/chkconfig', 'mysqld', 'on']) - elif os.path.isfile('/usr/sbin/update-rc.d'): - subprocess.call(['/usr/sbin/update-rc.d', 'mysql', 'defaults']) - else: - log.error( - _("Could not configure to start on boot, the MySQL database service.") - ) + if conf.mysqlserver != 'existing': + mysqlservice = 'mysqld.service' + if os.path.isfile('/usr/lib/systemd/system/mariadb.service'): + mysqlservice = 'mariadb.service' + elif os.path.isfile('/usr/lib/systemd/system/mysql.service'): + mysqlservice = 'mysql.service' + if not os.path.isfile('/usr/lib/systemd/system/' + mysqlservice): + # on Debian Jessie, systemctl restart mysql + mysqlservice = 'mysql' + + if os.path.isfile('/bin/systemctl'): + subprocess.call(['/bin/systemctl', 'restart', mysqlservice]) + elif os.path.isfile('/sbin/service'): + subprocess.call(['/sbin/service', 'mysqld', 'restart']) + elif os.path.isfile('/usr/sbin/service'): + subprocess.call(['/usr/sbin/service', 'mysql', 'restart']) + else: + log.error(_("Could not start the MySQL database service.")) + + if os.path.isfile('/bin/systemctl'): + subprocess.call(['/bin/systemctl', 'enable', mysqlservice]) + elif os.path.isfile('/sbin/chkconfig'): + subprocess.call(['/sbin/chkconfig', 'mysqld', 'on']) + elif os.path.isfile('/usr/sbin/update-rc.d'): + subprocess.call(['/usr/sbin/update-rc.d', 'mysql', 'defaults']) + else: + log.error( + _("Could not configure to start on boot, the MySQL database service.") + ) - log.info(_("Waiting for at most 30 seconds for MySQL/MariaDB to settle...")) - max_wait = 30 - while max_wait > 0: - for socket_path in socket_paths: - if os.path.exists(socket_path): - max_wait = 0 + log.info(_("Waiting for at most 30 seconds for MySQL/MariaDB to settle...")) + max_wait = 30 + while max_wait > 0: + for socket_path in socket_paths: + if os.path.exists(socket_path): + max_wait = 0 - if max_wait > 0: - max_wait = max_wait - 1 - time.sleep(1) + if max_wait > 0: + max_wait = max_wait - 1 + time.sleep(1) options = { 1: "Existing MySQL server (with root password already set).", @@ -116,14 +125,17 @@ } answer = 0 - if len([x for x in socket_paths if os.path.exists(x)]) > 0: - if conf.mysqlserver: - if conf.mysqlserver == 'existing': - answer = 1 - elif conf.mysqlserver == 'new': - answer = 2 - if answer == 0: - answer = utils.ask_menu(_("What MySQL server are we 
setting up?"), options) + if conf.mysqlserver != 'existing': + if len([x for x in socket_paths if os.path.exists(x)]) > 0: + if conf.mysqlserver: + if conf.mysqlserver == 'existing': + answer = 1 + elif conf.mysqlserver == 'new': + answer = 2 + if answer == 0: + answer = utils.ask_menu(_("What MySQL server are we setting up?"), options) + else: + answer = 1 if answer == "1" or answer == 1: if not conf.mysqlrootpw: @@ -214,7 +226,8 @@ [mysql] user=root password='%s' -""" % (mysql_root_password) +host=%s +""" % (mysql_root_password, conf.mysqlhost) fp = open('/tmp/kolab-setup-my.cnf', 'w') os.chmod('/tmp/kolab-setup-my.cnf', 600)
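With the new --mysqlhost option the temporary defaults file gains a host= line, so the root connection can target a remote MySQL/MariaDB server, and the service restart/enable logic is skipped entirely when --mysqlserver=existing is used. A hedged sketch of the defaults-file generation; the helper name is mine, and 0o600 is used here as the octal owner-only mode (the original passes the literal 600):

import os


def write_setup_mycnf(mysql_root_password, mysql_host='127.0.0.1',
                      path='/tmp/kolab-setup-my.cnf'):
    # Temporary client defaults file used during setup.
    data = """
[mysql]
user=root
password='%s'
host=%s
""" % (mysql_root_password, mysql_host)

    fp = open(path, 'w')
    os.chmod(path, 0o600)
    fp.write(data)
    fp.close()

    return path


write_setup_mycnf('secret', 'db.example.org')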
View file
pykolab-0.8.15.tar.gz/pykolab/setup/setup_roundcube.py -> pykolab-0.8.16.tar.gz/pykolab/setup/setup_roundcube.py
Changed
@@ -232,7 +232,8 @@
[mysql]
user=root
password='%s'
-""" % (mysql_root_password)
+host=%s
+""" % (mysql_root_password, conf.mysqlhost)

fp = open('/tmp/kolab-setup-my.cnf', 'w')
os.chmod('/tmp/kolab-setup-my.cnf', 600)
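setup_roundcube.py writes the same host= line into its temporary defaults file. Purely as an illustration (this invocation is hypothetical, not copied from pykolab), such a file can then be handed to the stock mysql client:

import subprocess

# Hypothetical consumer of the defaults file written during setup; the SQL
# statement is only an example.
subprocess.call([
    'mysql',
    '--defaults-file=/tmp/kolab-setup-my.cnf',
    '-e', 'SHOW DATABASES;'
])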
View file
pykolab-0.8.15.tar.gz/pykolab/setup/setup_syncroton.py -> pykolab-0.8.16.tar.gz/pykolab/setup/setup_syncroton.py
Changed
@@ -33,12 +33,20 @@ log = pykolab.getLogger('pykolab.setup') conf = pykolab.getConf() + def __init__(): - components.register('syncroton', execute, description=description(), after=['mysql','ldap','roundcube']) + components.register( + 'syncroton', + execute, + description=description(), + after=['mysql','ldap','roundcube'] + ) + def description(): return _("Setup Syncroton.") + def execute(*args, **kw): schema_files = [] for root, directories, filenames in os.walk('/usr/share/doc/'): @@ -71,7 +79,8 @@ [mysql] user=root password='%s' -""" % (mysql_root_password) +host=%s +""" % (mysql_root_password, conf.mysqlhost) fp = open('/tmp/kolab-setup-my.cnf', 'w') os.chmod('/tmp/kolab-setup-my.cnf', 0600)
View file
pykolab-0.8.15.tar.gz/pykolab/utils.py -> pykolab-0.8.16.tar.gz/pykolab/utils.py
Changed
@@ -17,21 +17,38 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. # +from __future__ import print_function + import base64 import getpass import grp import os import pwd +from six import string_types import struct import sys import pykolab from pykolab import constants -from pykolab.translate import _ +from pykolab.translate import _ as _l +# pylint: disable=invalid-name log = pykolab.getLogger('pykolab.utils') conf = pykolab.getConf() +try: + # pylint: disable=redefined-builtin + input = raw_input +except NameError: + pass + +try: + unicode('') +except NameError: + unicode = str + + +# pylint: disable=too-many-branches def ask_question(question, default="", password=False, confirm=False): """ Ask a question on stderr. @@ -43,56 +60,57 @@ Usage: pykolab.utils.ask_question("What is the server?", default="localhost") """ - - if not default == "" and not default == None and conf.cli_keywords.answer_default: + if default != "" and default is not None and conf.cli_keywords.answer_default: if not conf.cli_keywords.quiet: - print ("%s [%s]: " % (question, default)) + print("%s [%s]: " % (question, default)) return default if password: - if default == "" or default == None: + if default == "" or default is None: answer = getpass.getpass("%s: " % (question)) else: answer = getpass.getpass("%s [%s]: " % (question, default)) else: - if default == "" or default == None: - answer = raw_input("%s: " % (question)) + if default == "" or default is None: + answer = input("%s: " % (question)) else: - answer = raw_input("%s [%s]: " % (question, default)) + answer = input("%s [%s]: " % (question, default)) + # pylint: disable=too-many-nested-blocks if not answer == "": if confirm: answer_confirm = None answer_confirmed = False while not answer_confirmed: if password: - answer_confirm = getpass.getpass(_("Confirm %s: ") % (question)) + answer_confirm = getpass.getpass(_l("Confirm %s: ") % (question)) else: - answer_confirm = raw_input(_("Confirm %s: ") % (question)) + answer_confirm = input(_l("Confirm %s: ") % (question)) if not answer_confirm == answer: - print >> sys.stderr, _("Incorrect confirmation. " + \ - "Please try again.") + print(_l("Incorrect confirmation. 
Please try again."), file=sys.stderr) if password: - if default == "" or default == None: - answer = getpass.getpass(_("%s: ") % (question)) + if default == "" or default is None: + answer = getpass.getpass(_l("%s: ") % (question)) else: - answer = getpass.getpass(_("%s [%s]: ") % (question, default)) + answer = getpass.getpass(_l("%s [%s]: ") % (question, default)) else: - if default == "" or default == None: - answer = raw_input(_("%s: ") % (question)) + if default == "" or default is None: + answer = input(_l("%s: ") % (question)) else: - answer = raw_input(_("%s [%s]: ") % (question, default)) + answer = input(_l("%s [%s]: ") % (question, default)) else: answer_confirmed = True if answer == "": return default - else: - return answer + return answer + + +# pylint: disable=too-many-return-statements def ask_confirmation(question, default="y", all_inclusive_no=True): """ Create a confirmation dialog, including a default option (capitalized), @@ -101,11 +119,11 @@ """ default_answer = None - if default in [ "y", "Y" ]: + if default in ["y", "Y"]: default_answer = True default_no = "n" default_yes = "Y" - elif default in [ "n", "N" ]: + elif default in ["n", "N"]: default_answer = False default_no = "N" default_yes = "y" @@ -115,44 +133,50 @@ default_no = "'no'" default_yes = "Please type 'yes'" - if conf.cli_keywords.answer_yes or (conf.cli_keywords.answer_default and default_answer is not None): + if conf.cli_keywords.answer_yes \ + or (conf.cli_keywords.answer_default and default_answer is not None): + if not conf.cli_keywords.quiet: - print ("%s [%s/%s]: " % (question,default_yes,default_no)) + print("%s [%s/%s]: " % (question, default_yes, default_no)) if conf.cli_keywords.answer_yes: return True if conf.cli_keywords.answer_default: return default_answer answer = False - while answer == False: - answer = raw_input("%s [%s/%s]: " % (question,default_yes,default_no)) + while not answer: + answer = input("%s [%s/%s]: " % (question, default_yes, default_no)) # Parse answer and set back to False if not appropriate if all_inclusive_no: - if answer == "" and not default_answer == None: + if answer == "" and default_answer is not None: return default_answer - elif answer in [ "y", "Y", "yes" ]: + + if answer in ["y", "Y", "yes"]: return True - elif answer in [ "n", "N", "no" ]: - return False - else: - answer = False - print >> sys.stderr, _("Please answer 'yes' or 'no'.") - else: - if not answer in [ "y", "Y", "yes" ]: + + if answer in ["n", "N", "no"]: return False - else: - return True + answer = False + print(_l("Please answer 'yes' or 'no'."), file=sys.stderr) + + if answer not in ["y", "Y", "yes"]: + return False + + return True + + +# pylint: disable=dangerous-default-value def ask_menu(question, options={}, default=''): - if not default == '' and conf.cli_keywords.answer_default: + if default != '' and conf.cli_keywords.answer_default: if not conf.cli_keywords.quiet: - print question + " [" + default + "]:" + print(question + " [" + default + "]:") return default - if not default == '': - print question + " [" + default + "]:" + if default != '': + print(question + " [" + default + "]:") else: - print question + print(question) answer_correct = False max_key_length = 0 @@ -162,7 +186,7 @@ options = {} for key in _options: options[key] = key - + keys = options.keys() keys.sort() @@ -174,24 +198,24 @@ str_format = "%%%ds" % max_key_length - if default == '' or not default in options.keys(): + if default == '' or default not in options.keys(): for key in keys: if options[key] == key: - 
print " - " + key + print(" - " + key) else: - print " - " + eval("str_format % key") + ": " + options[key] + print(" - " + str_format % key + ": " + options[key]) - answer = raw_input(_("Choice") + ": ") + answer = input(_l("Choice") + ": ") else: - answer = raw_input(_("Choice (type '?' for options)") + ": ") + answer = input(_l("Choice (type '?' for options)") + ": ") if answer == '?': for key in keys: if options[key] == key: - print " - " + key + print(" - " + key) else: - print " - " + eval("str_format % key") + ": " + options[key] + print(" - " + str_format % key + ": " + options[key]) continue @@ -203,8 +227,9 @@ return answer + def decode(key, enc): - if key == None: + if key is None: return enc dec = [] @@ -215,8 +240,9 @@ dec.append(dec_c) return "".join(dec) + def encode(key, clear): - if key == None: + if key is None: return clear enc = [] @@ -226,15 +252,16 @@ enc.append(enc_c) return base64.urlsafe_b64encode("".join(enc)) + def ensure_directory(_dir, _user='root', _group='root'): if not os.path.isdir(_dir): os.makedirs(_dir) try: try: - (ruid, euid, suid) = os.getresuid() - (rgid, egid, sgid) = os.getresgid() - except AttributeError, errmsg: + (ruid, _, _) = os.getresuid() + (rgid, _, _) = os.getresgid() + except AttributeError: ruid = os.getuid() rgid = os.getgid() @@ -243,18 +270,10 @@ if rgid == 0: # Get group entry details try: - ( - group_name, - group_password, - group_gid, - group_members - ) = grp.getgrnam(_group) + (_, _, group_gid, _) = grp.getgrnam(_group) except KeyError: - print >> sys.stderr, _("Group %s does not exist") % ( - _group - ) - + print(_l("Group %s does not exist") % (_group), file=sys.stderr) sys.exit(1) # Set real and effective group if not the same as current. @@ -264,28 +283,20 @@ if ruid == 0: # Means we haven't switched yet. try: - ( - user_name, - user_password, - user_uid, - user_gid, - user_gecos, - user_homedir, - user_shell - ) = pwd.getpwnam(_user) + (_, _, user_uid, _, _, _, _) = pwd.getpwnam(_user) except KeyError: - print >> sys.stderr, _("User %s does not exist") % (_user) + print(_l("User %s does not exist") % (_user), file=sys.stderr) sys.exit(1) - # Set real and effective user if not the same as current. 
if not user_uid == ruid: os.chown(_dir, user_uid, -1) - except: - print >> sys.stderr, _("Could not change the permissions on %s") % (_dir) + except Exception: + print(_l("Could not change the permissions on %s") % (_dir), file=sys.stderr) + def generate_password(): import subprocess @@ -299,6 +310,7 @@ return output + def multiline_message(message): if hasattr(conf, 'cli_keywords') and hasattr(conf.cli_keywords, 'quiet'): if conf.cli_keywords.quiet: @@ -326,41 +338,44 @@ return "\n%s\n" % ("\n".join(lines)) + def stripped_message(message): return "\n" + message.strip() + "\n" + def str2unicode(s, encoding='utf-8'): if isinstance(s, unicode): return s try: return unicode(s, encoding) - except: + except Exception: pass return s + def normalize(_object): - if type(_object) == list: + if isinstance(_object, list): result = [] - elif type(_object) == dict: + elif isinstance(_object, dict): result = {} else: return _object - if type(_object) == list: + if isinstance(_object, list): for item in _object: result.append(item.lower()) result = list(set(result)) return result - elif type(_object) == dict: + if isinstance(_object, dict): def _strip(value): try: return value.strip() - except: + except Exception: return value for key in _object: - if type(_object[key]) == list: + if isinstance(_object[key], list): if _object[key] is None: continue @@ -382,21 +397,21 @@ result[key.lower()] = _strip(_object[key]) - if result.has_key('objectsid') and not result['objectsid'][0] == "S": + if 'objectsid' in result and not result['objectsid'][0] == "S": result['objectsid'] = sid_to_string(result['objectsid']) - if result.has_key('sn'): + if 'sn' in result: result['surname'] = result['sn'].replace(' ', '') - if result.has_key('mail'): + if 'mail' in result: if isinstance(result['mail'], list): result['mail'] = result['mail'][0] - if len(result['mail']) > 0: + if result['mail']: if len(result['mail'].split('@')) > 1: result['domain'] = result['mail'].split('@')[1] - if not result.has_key('domain') and result.has_key('standard_domain'): + if 'domain' not in result and 'standard_domain' in result: result['domain'] = result['standard_domain'] if 'objectclass' not in result: @@ -412,7 +427,8 @@ return result -def parse_input(_input, splitchars= [ ' ' ]): + +def parse_input(_input, splitchars=[' ']): """ Split the input string using the split characters defined in splitchars, and remove the empty list items, then unique the @@ -438,6 +454,7 @@ return _output_list + def parse_ldap_uri(uri): """ Parse an LDAP URI and return it's components. 
@@ -462,7 +479,7 @@ _server = _ldap_uri.split('//')[1].split('/')[0] _base_dn = _ldap_uri.split('//')[1].split('/')[1] - except: + except Exception: _server = uri.split('//')[1].split('/')[0] _attr = None _scope = None @@ -483,7 +500,7 @@ if _attr == '': _attrs = [] else: - _attrs = [ _attr ] + _attrs = [_attr] if _scope == '': _scope = 'sub' @@ -491,11 +508,12 @@ if _filter == '': _filter = "(objectclass=*)" - return (_protocol, _server, _port, _base_dn, _attr, _scope, _filter) + return (_protocol, _server, _port, _base_dn, _attrs, _scope, _filter) - except: + except Exception: return None + def pop_empty_from_list(_input_list): _output_list = [] @@ -503,6 +521,7 @@ if not item == '': _output_list.append(item) + def sid_to_string(sid): srl = ord(sid[0]) number_sub_id = ord(sid[1]) @@ -511,70 +530,77 @@ sub_ids = [] for i in range(number_sub_id): - sub_ids.append(struct.unpack('<I',sid[8+4*i:12+4*i])[0]) + sub_ids.append(struct.unpack('<I', sid[8 + 4 * i:12 + 4 * i])[0]) result = 'S-%d-%d-%s' % ( - srl, - iav, - '-'.join([str(s) for s in sub_ids]), - ) + srl, + iav, + '-'.join([str(s) for s in sub_ids]), + ) return result + def standard_root_dn(domain): return 'dc=%s' % (',dc='.join(domain.split('.'))) + def translate(mystring, locale_name='en_US'): import locale import subprocess - log.debug(_("Transliterating string %r with locale %r") % (mystring, locale_name), level=8) + log.debug(_l("Transliterating string %r with locale %r") % (mystring, locale_name), level=8) if len(locale.normalize(locale_name).split('.')) > 1: - (locale_name,locale_charset) = locale.normalize(locale_name).split('.') + (locale_name, locale_charset) = locale.normalize(locale_name).split('.') else: locale_charset = 'utf-8' try: - log.debug(_("Attempting to set locale"), level=8) - locale.setlocale(locale.LC_ALL, (locale_name,locale_charset)) - log.debug(_("Success setting locale"), level=8) - except: - log.debug(_("Failure to set locale"), level=8) - pass + log.debug(_l("Attempting to set locale"), level=8) + locale.setlocale(locale.LC_ALL, (locale_name, locale_charset)) + log.debug(_l("Success setting locale"), level=8) + except Exception: + log.debug(_l("Failure to set locale"), level=8) + + command = ['/usr/bin/iconv', '-f', 'UTF-8', '-t', 'ASCII//TRANSLIT', '-s'] - command = [ '/usr/bin/iconv', - '-f', 'UTF-8', - '-t', 'ASCII//TRANSLIT', - '-s' ] + log.debug(_l("Executing '%s | %s'") % (r"%s" % (mystring), ' '.join(command)), level=8) - log.debug(_("Executing '%s | %s'") % (r"%s" % (mystring), ' '.join(command)), level=8) - process = subprocess.Popen(command, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE, env={'LANG': locale.normalize(locale_name)}) + process = subprocess.Popen( + command, + stdout=subprocess.PIPE, + stdin=subprocess.PIPE, + stderr=subprocess.PIPE, + env={'LANG': locale.normalize(locale_name)} + ) try: - print >> process.stdin, r"%s" % mystring - except UnicodeEncodeError, errmsg: + print(r"%s" % (mystring), file=process.stdin) + except UnicodeEncodeError: pass result = process.communicate()[0].strip() if '?' 
in result or (result == '' and not mystring == ''): - log.warning(_("Could not translate %s using locale %s") % (mystring, locale_name)) + log.warning(_l("Could not translate %s using locale %s") % (mystring, locale_name)) from pykolab import translit result = translit.transliterate(mystring, locale_name) return result + def true_or_false(val): - if val == None: + if val is None: return False if isinstance(val, bool): return val - if isinstance(val, basestring) or isinstance(val, str): + if isinstance(val, string_types): val = val.lower() - if val in [ "true", "yes", "y", "1" ]: + + if val in ["true", "yes", "y", "1"]: return True else: return False @@ -585,6 +611,7 @@ else: return False + def is_service(services): """ Checks each item in list services to see if it has a RC script in @@ -605,4 +632,4 @@ else: _other_services.append(service) - return (_service,_other_services) + return (_service, _other_services)
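Most of the utils.py churn is Python 2/3 bridging: print_function, aliasing raw_input to input, falling back to str when unicode does not exist, and six.string_types instead of basestring. The pattern in isolation (a reduced sketch, not the full module):

from __future__ import print_function

try:
    # Python 2: make input() behave like raw_input().
    input = raw_input
except NameError:
    pass

try:
    unicode('')
except NameError:
    # Python 3 has no separate unicode type.
    unicode = str


def str2unicode(s, encoding='utf-8'):
    if isinstance(s, unicode):
        return s
    try:
        return unicode(s, encoding)
    except Exception:
        pass
    return s


print(str2unicode(b'caf\xc3\xa9'))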
View file
pykolab-0.8.15.tar.gz/pykolab/xml/attendee.py -> pykolab-0.8.16.tar.gz/pykolab/xml/attendee.py
Changed
@@ -33,6 +33,8 @@
"INDIVIDUAL": kolabformat.CutypeIndividual,
"RESOURCE": kolabformat.CutypeResource,
"GROUP": kolabformat.CutypeGroup,
+ "ROOM": kolabformat.CutypeRoom,
+ "UNKNOWN": kolabformat.CutypeUnknown,
}

participant_status_map = {
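With ROOM and UNKNOWN added to cutype_map, those calendar user types round-trip instead of being rejected. A small usage sketch, assuming a working pykolab.xml/kolabformat installation (the email address is made up):

from pykolab.xml import Attendee

attendee = Attendee("room1@example.org")

# "ROOM" and "UNKNOWN" are now valid calendar user type labels.
attendee.set_cutype("ROOM")
print(attendee.get_cutype())

# Unrecognized labels raise InvalidAttendeeCutypeError (see the updated
# unit tests further down).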
View file
pykolab-0.8.15.tar.gz/share/templates/freshclam.conf.tpl -> pykolab-0.8.16.tar.gz/share/templates/freshclam.conf.tpl
Changed
@@ -10,7 +10,7 @@
# Path to the log file (make sure it has proper permissions)
# Default: disabled
-UpdateLogFile /var/log/clamav/freshclam.log
+# UpdateLogFile /var/log/clamav/freshclam.log

# Maximum size of the log file.
# Value of 0 disables the limit.
View file
pykolab-0.8.15.tar.gz/share/templates/imapd.conf.tpl -> pykolab-0.8.16.tar.gz/share/templates/imapd.conf.tpl
Changed
@@ -1,7 +1,7 @@
-configdirectory: /var/lib/imap
-partition-default: /var/spool/imap
+configdirectory: $configdirectory
+partition-default: $partition_default
admins: $admins
-sievedir: /var/lib/imap/sieve
+sievedir: $sievedir
sendmail: /usr/sbin/sendmail
sasl_pwcheck_method: saslauthd
sasl_mech_list: PLAIN LOGIN
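The template no longer hard-codes the Red Hat paths; the three values now come from setup_imap.py (see above). The placeholders are $-style, so, as an illustration only (not necessarily how setup-kolab renders its templates, and with made-up values), Python's string.Template can fill them:

from string import Template

template = Template("""configdirectory: $configdirectory
partition-default: $partition_default
admins: $admins
sievedir: $sievedir
""")

print(template.safe_substitute(
    configdirectory="/var/lib/cyrus/",
    partition_default="/var/spool/cyrus/mail/",
    admins="cyrus-admin",
    sievedir="/var/lib/cyrus/sieve/",
))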
View file
pykolab-0.8.15.tar.gz/tests/unit/test-002-attendee.py -> pykolab-0.8.16.tar.gz/tests/unit/test-002-attendee.py
Changed
@@ -4,6 +4,7 @@ from pykolab.xml import Attendee from pykolab.xml import participant_status_label +from pykolab.xml.attendee import InvalidAttendeeCutypeError class TestEventXML(unittest.TestCase): @@ -96,17 +97,21 @@ self.assertEqual([k for k, v in self.attendee.role_map.iteritems() if v == 3][0], "NON-PARTICIPANT") def test_015_cutype_map_length(self): - self.assertEqual(len(self.attendee.cutype_map.keys()), 3) + self.assertEqual(len(self.attendee.cutype_map.keys()), 5) def test_016_cutype_map_forward_lookup(self): - self.assertEqual(self.attendee.cutype_map["GROUP"], 1) - self.assertEqual(self.attendee.cutype_map["INDIVIDUAL"], 2) - self.assertEqual(self.attendee.cutype_map["RESOURCE"], 3) + self.assertEqual(self.attendee.cutype_map["GROUP"], kolabformat.CutypeGroup) + self.assertEqual(self.attendee.cutype_map["INDIVIDUAL"], kolabformat.CutypeIndividual) + self.assertEqual(self.attendee.cutype_map["RESOURCE"], kolabformat.CutypeResource) + self.assertEqual(self.attendee.cutype_map["ROOM"], kolabformat.CutypeRoom) + self.assertEqual(self.attendee.cutype_map["UNKNOWN"], kolabformat.CutypeUnknown) def test_017_cutype_map_reverse_lookup(self): - self.assertEqual([k for k, v in self.attendee.cutype_map.iteritems() if v == 1][0], "GROUP") - self.assertEqual([k for k, v in self.attendee.cutype_map.iteritems() if v == 2][0], "INDIVIDUAL") - self.assertEqual([k for k, v in self.attendee.cutype_map.iteritems() if v == 3][0], "RESOURCE") + self.assertEqual([k for k, v in self.attendee.cutype_map.iteritems() if v == kolabformat.CutypeGroup][0], "GROUP") + self.assertEqual([k for k, v in self.attendee.cutype_map.iteritems() if v == kolabformat.CutypeIndividual][0], "INDIVIDUAL") + self.assertEqual([k for k, v in self.attendee.cutype_map.iteritems() if v == kolabformat.CutypeResource][0], "RESOURCE") + self.assertEqual([k for k, v in self.attendee.cutype_map.iteritems() if v == kolabformat.CutypeRoom][0], "ROOM") + self.assertEqual([k for k, v in self.attendee.cutype_map.iteritems() if v == kolabformat.CutypeUnknown][0], "UNKNOWN") def test_018_partstat_label(self): self.assertEqual(participant_status_label('NEEDS-ACTION'), "Needs Action") @@ -133,5 +138,8 @@ self.assertEqual(data['email'], 'jane@doe.org') self.assertTrue(data['rsvp']) + def test_030_to_cutype_exception(self): + self.assertRaises(InvalidAttendeeCutypeError, self.attendee.set_cutype, "DUMMY") + if __name__ == '__main__': unittest.main()
View file
pykolab-0.8.15.tar.gz/tests/unit/test-003-event.py -> pykolab-0.8.16.tar.gz/tests/unit/test-003-event.py
Changed
@@ -41,7 +41,7 @@ CLASS:PUBLIC ATTENDEE;CN="Manager, Jane";PARTSTAT=NEEDS-ACTION;ROLE=REQ-PARTICIPANT;CUTYP E=INDIVIDUAL;RSVP=TRUE:mailto:jane.manager@example.org -ATTENDEE;CUTYPE=RESOURCE;PARTSTAT=NEEDS-ACTION;ROLE=OPT-PARTICIPANT;RSVP=FA +ATTENDEE;CUTYPE=ROOM;PARTSTAT=NEEDS-ACTION;ROLE=OPT-PARTICIPANT;RSVP=FA LSE:MAILTO:max@imum.com ORGANIZER;CN=Doe\, John:mailto:john.doe@example.org URL:http://somelink.com/foo @@ -495,7 +495,7 @@ self.assertEqual(event.get_categories(), ["Personal"]) self.assertEqual(event.get_priority(), '2') self.assertEqual(event.get_classification(), kolabformat.ClassPublic) - self.assertEqual(event.get_attendee_by_email("max@imum.com").get_cutype(), kolabformat.CutypeResource) + self.assertEqual(event.get_attendee_by_email("max@imum.com").get_cutype(), kolabformat.CutypeRoom) self.assertEqual(event.get_sequence(), 2) self.assertTrue(event.is_recurring()) self.assertIsInstance(event.get_duration(), datetime.timedelta)
View file
pykolab-0.8.15.tar.gz/tests/unit/test-009-parse_ldap_uri.py -> pykolab-0.8.16.tar.gz/tests/unit/test-009-parse_ldap_uri.py
Changed
@@ -8,9 +8,9 @@
def test_001_ldap_uri(self):
ldap_uri = "ldap://localhost"
result = utils.parse_ldap_uri(ldap_uri)
- self.assertEqual(result, ("ldap", "localhost", "389", None, None, None, None))
+ self.assertEqual(result, ("ldap", "localhost", "389", None, [None], None, None))

def test_002_ldap_uri_port(self):
ldap_uri = "ldap://localhost:389"
result = utils.parse_ldap_uri(ldap_uri)
- self.assertEqual(result, ("ldap", "localhost", "389", None, None, None, None))
+ self.assertEqual(result, ("ldap", "localhost", "389", None, [None], None, None))
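The changed assertions track a fix in utils.parse_ldap_uri(): the function now returns the attribute list it builds (_attrs) rather than the raw _attr value, so the fifth element of the tuple is a list. Shape of the corrected return value, assuming pykolab is importable:

from pykolab import utils

result = utils.parse_ldap_uri("ldap://localhost:389")

# (protocol, server, port, base_dn, attrs, scope, filter) -- per the updated
# test this yields ("ldap", "localhost", "389", None, [None], None, None).
print(result)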
View file
pykolab-0.8.15.tar.gz/wallace/__init__.py -> pykolab-0.8.16.tar.gz/wallace/__init__.py
Changed
@@ -36,13 +36,15 @@ import pykolab from pykolab import utils -from pykolab.translate import _ +from pykolab.logger import StderrToLogger +from pykolab.translate import _ as _l +import modules from modules import cb_action_ACCEPT # pylint: disable=invalid-name log = pykolab.getLogger('pykolab.wallace') -sys.stderr = pykolab.logger.StderrToLogger(log) +sys.stderr = StderrToLogger(log) conf = pykolab.getConf() @@ -54,7 +56,7 @@ # Cause the previous modules to be skipped wallace_modules = wallace_modules[(wallace_modules.index(kwargs['module']) + 1):] - log.debug(_("Wallace modules: %r") % (wallace_modules), level=8) + log.debug(_l("Wallace modules: %r") % (wallace_modules), level=8) # Execute the module if 'stage' in kwargs: @@ -129,31 +131,39 @@ while True: while not self.finished.is_set(): self.finished.wait(self.interval) - log.debug(_("Timer looping function '%s' every %ss") % ( + log.debug(_l("Timer looping function '%s' every %ss") % ( self.function.__name__, self.interval ), level=8) self.function(*self.args, **self.kwargs) self.finished.set() - log.debug(_("Timer loop %s") % ('still active','finished')[self.finished.is_set()], level=8) + log.debug( + _l("Timer loop %s") % ('still active', 'finished')[self.finished.is_set()], + level=8 + ) + break + class WallaceDaemon: + heartbeat = None + timer = None + def __init__(self): self.current_connections = 0 self.max_connections = 24 self.parent_pid = None self.pool = None - daemon_group = conf.add_cli_parser_option_group(_("Daemon Options")) + daemon_group = conf.add_cli_parser_option_group(_l("Daemon Options")) daemon_group.add_option( "--fork", dest="fork_mode", action="store_true", default=False, - help=_("Fork to the background.") + help=_l("Fork to the background.") ) daemon_group.add_option( @@ -161,7 +171,7 @@ dest="wallace_bind_address", action="store", default="localhost", - help=_("Bind address for Wallace.") + help=_l("Bind address for Wallace.") ) daemon_group.add_option( @@ -169,7 +179,7 @@ dest="process_groupname", action="store", default="kolab", - help=_("Run as group GROUPNAME"), + help=_l("Run as group GROUPNAME"), metavar="GROUPNAME" ) @@ -179,16 +189,16 @@ action="store", default=4, type=int, - help=_("Number of threads to use.") + help=_l("Number of threads to use.") ) daemon_group.add_option( "--max-tasks", - dest = "max_tasks", - action = "store", - default = None, - type = int, - help = _("Number of tasks per process.") + dest="max_tasks", + action="store", + default=None, + type=int, + help=_l("Number of tasks per process.") ) daemon_group.add_option( @@ -196,7 +206,7 @@ dest="pidfile", action="store", default="/var/run/wallaced/wallaced.pid", - help=_("Path to the PID file to use.") + help=_l("Path to the PID file to use.") ) daemon_group.add_option( @@ -205,7 +215,7 @@ action="store", default=10026, type=int, - help=_("Port that Wallace is supposed to use.") + help=_l("Port that Wallace is supposed to use.") ) daemon_group.add_option( @@ -213,7 +223,7 @@ dest="process_username", action="store", default="kolab", - help=_("Run as user USERNAME"), + help=_l("Run as user USERNAME"), metavar="USERNAME" ) @@ -230,13 +240,14 @@ mp_logger.setLevel(multiprocessing.SUBDEBUG) mp_logger.debug('Python multi-processing logger started') - import modules - modules.__init__() + modules.initialize() self.modules = conf.get_list('wallace', 'modules') if not self.modules: self.modules = [] + # pylint: disable=too-many-branches + # pylint: disable=too-many-statements def do_wallace(self): self.parent_pid = os.getpid() @@ -264,7 
+275,7 @@ # pylint: disable=broad-except except Exception: log.warning( - _("Could not bind to socket on port %d on bind address %s") % ( + _l("Could not bind to socket on port %d on bind address %s") % ( conf.wallace_port, conf.wallace_bind_address ) @@ -277,7 +288,7 @@ # pylint: disable=broad-except except Exception: - log.warning(_("Could not shut down socket")) + log.warning(_l("Could not shut down socket")) time.sleep(1) s.close() @@ -287,7 +298,10 @@ s.listen(5) self.timer = Timer(180, self.pickup_spool_messages, args=[], kwargs={'sync': True}) + + # pylint: disable=attribute-defined-outside-init self.timer.daemon = True + self.timer.start() # start background process to run periodic jobs in active modules @@ -301,6 +315,7 @@ self.heartbeat.finished = multiprocessing.Event() self.heartbeat.daemon = True self.heartbeat.start() + except Exception as errmsg: log.error("Failed to start heartbeat daemon: %s" % (errmsg)) finally: @@ -312,11 +327,23 @@ try: while 1: while self.current_connections >= self.max_connections: - log.debug(_("Reached limit of max connections of: %s. Sleeping for 0.5s") % self.max_connections, level=6) + log.debug( + _l("Reached limit of max connections of: %s. Sleeping for 0.5s") % ( + self.max_connections + ), + level=6 + ) + time.sleep(0.5) pair = s.accept() - log.debug(_("Accepted connection %r with address %r") % (pair if pair is not None else (None, None)), level=8) + log.debug( + _l("Accepted connection %r with address %r") % ( + pair if pair is not None else (None, None) + ), + level=8 + ) + if pair is not None: self.current_connections += 1 connection, address = pair @@ -326,11 +353,11 @@ if conf.debuglevel > 8: _smtpd.DEBUGSTREAM = pykolab.logger.StderrToLogger(log) - log.debug(_("Creating SMTPChannel for accepted message"), level=8) - channel = _smtpd.SMTPChannel(self, connection, address) + log.debug(_l("Creating SMTPChannel for accepted message"), level=8) + _smtpd.SMTPChannel(self, connection, address) asyncore.loop() else: - log.error(_("Socket accepted, but (conn, address) tuple is None.")) + log.error(_l("Socket accepted, but (conn, address) tuple is None.")) # pylint: disable=broad-except except Exception: @@ -343,6 +370,7 @@ self.timer.cancel() self.timer.join() + # pylint: disable=no-self-use def data_header(self, mailfrom, rcpttos): COMMASPACE = ', ' @@ -354,7 +382,7 @@ pickup_path = '/var/spool/pykolab/wallace/' messages = [] - for root, directory, files in os.walk(pickup_path): + for root, _, files in os.walk(pickup_path): for filename in files: messages.append((root, filename)) @@ -455,7 +483,7 @@ os.write(fp, data) os.close(fp) - log.debug(_("Started processing accepted message %s") % filename, level=8) + log.debug(_l("Started processing accepted message %s") % filename, level=8) self.pool.apply_async(pickup_message, (filename, (self.modules))) self.current_connections -= 1 @@ -470,7 +498,7 @@ if os.getpid() == self.parent_pid: log.debug("Stopping process %s" % multiprocessing.current_process().name, level=8) - log.debug(_("Terminating processes pool"), level=8) + log.debug(_l("Terminating processes pool"), level=8) self.pool.close() if hasattr(self, 'timer'): @@ -479,7 +507,7 @@ self.timer.finished.set() self.timer.cancel() - log.debug(_("Terminating heartbeat process"), level=8) + log.debug(_l("Terminating heartbeat process"), level=8) self.heartbeat.finished.set() self.heartbeat.terminate() @@ -489,7 +517,7 @@ self.heartbeat.join(5) if os.access(conf.pidfile, os.R_OK): - log.warning(_("Removing PID file %s") % conf.pidfile) + 
log.warning(_l("Removing PID file %s") % conf.pidfile) os.remove(conf.pidfile) log.warning("Exiting!") @@ -520,8 +548,8 @@ try: try: - (ruid, euid, suid) = os.getresuid() - (rgid, egid, sgid) = os.getresgid() + (ruid, _, _) = os.getresuid() + (rgid, _, _) = os.getresgid() except AttributeError: ruid = os.getuid() rgid = os.getgid() @@ -531,22 +559,17 @@ if rgid == 0: # Get group entry details try: - ( - group_name, - group_password, - group_gid, - group_members - ) = grp.getgrnam(conf.process_groupname) + (_, _, group_gid, _) = grp.getgrnam(conf.process_groupname) except KeyError: - print(_("Group %s does not exist") % (conf.process_groupname)) + print(_l("Group %s does not exist") % (conf.process_groupname)) sys.exit(1) # Set real and effective group if not the same as current. if not group_gid == rgid: log.debug( - _("Switching real and effective group id to %d") % ( + _l("Switching real and effective group id to %d") % ( group_gid ), level=8 @@ -557,25 +580,17 @@ if ruid == 0: # Means we haven't switched yet. try: - ( - user_name, - user_password, - user_uid, - user_gid, - user_gecos, - user_homedir, - user_shell - ) = pwd.getpwnam(conf.process_username) + (_, _, user_uid, _, _, _, _) = pwd.getpwnam(conf.process_username) except KeyError: - print(_("User %s does not exist") % (conf.process_username)) + print(_l("User %s does not exist") % (conf.process_username)) sys.exit(1) # Set real and effective user if not the same as current. if not user_uid == ruid: log.debug( - _("Switching real and effective user id to %d") % ( + _l("Switching real and effective user id to %d") % ( user_uid ), level=8 @@ -585,7 +600,7 @@ # pylint: disable=broad-except except Exception: - log.error(_("Could not change real and effective uid and/or gid")) + log.error(_l("Could not change real and effective uid and/or gid")) try: pid = os.getpid() @@ -631,20 +646,20 @@ exitcode = errmsg except KeyboardInterrupt: exitcode = 1 - log.info(_("Interrupted by user")) + log.info(_l("Interrupted by user")) except AttributeError: exitcode = 1 traceback.print_exc() - print(_("Traceback occurred, please report a bug.")) + print(_l("Traceback occurred, please report a bug.")) except TypeError as errmsg: exitcode = 1 traceback.print_exc() - log.error(_("Type Error: %s") % errmsg) - except: + log.error(_l("Type Error: %s") % errmsg) + except Exception: exitcode = 2 traceback.print_exc() - print(_("Traceback occurred, please report a bug.")) + print(_l("Traceback occurred, please report a bug.")) sys.exit(exitcode) @@ -656,8 +671,8 @@ def write_pid(self): pid = os.getpid() if os.access(os.path.dirname(conf.pidfile), os.W_OK): - fp = open(conf.pidfile,'w') + fp = open(conf.pidfile, 'w') fp.write("%d\n" % (pid)) fp.close() else: - print(_("Could not write pid file %s") % (conf.pidfile)) + print(_l("Could not write pid file %s") % (conf.pidfile))
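Beyond the _l translation alias and the pylint cleanups, the heartbeat Timer loop gains a break so the thread really terminates once its finished flag is set. A reduced, threading-based sketch of that loop (the real wallace code drives it with its own Timer class, arguments and logging):

import threading


class Timer(threading.Thread):
    def __init__(self, interval, function):
        threading.Thread.__init__(self)
        self.interval = interval
        self.function = function
        self.finished = threading.Event()

    def run(self):
        while True:
            while not self.finished.is_set():
                self.finished.wait(self.interval)
                self.function()

            self.finished.set()
            # Without this break the outer loop would spin forever after
            # the timer has finished.
            break


timer = Timer(0.1, lambda: None)
timer.daemon = True
timer.start()
timer.finished.set()
timer.join(1)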
View file
pykolab-0.8.15.tar.gz/wallace/module_resources.py -> pykolab-0.8.16.tar.gz/wallace/module_resources.py
Changed
@@ -30,6 +30,7 @@ import random import re import signal +from six import string_types import time import uuid @@ -121,7 +122,7 @@ # pylint: disable=too-many-locals # pylint: disable=too-many-return-statements # pylint: disable=too-many-statements -def execute(*args, **kw): # noqa: C901 +def execute(*args, **kw): global auth, imap, extra_log_params # TODO: Test for correct call. @@ -329,14 +330,17 @@ ) # pylint: disable=broad-except - except Exception as e: - log.error(_("Could not find envelope sender attendee: %r") % (e)) + except Exception as errmsg: + log.error(_("Could not find envelope sender attendee: %r") % (errmsg)) continue # compare sequence number to avoid outdated replies if not itip_event['sequence'] == event.get_sequence(): log.info( - _("The iTip reply sequence (%r) doesn't match the referred event version (%r). Ignoring.") % ( + _( + "The iTip reply sequence (%r) doesn't match the " + + "referred event version (%r). Ignoring." + ) % ( itip_event['sequence'], event.get_sequence() ) @@ -358,9 +362,14 @@ elif owner_reply == kolabformat.PartDeclined: decline_reservation_request(_itip_event, receiving_resource) else: - log.info(_("Invalid response (%r) received from resource owner for event %r") % ( - sender_attendee.get_participant_status(True), reference_uid - )) + log.info( + _( + "Invalid response (%r) received from resource owner for event %r" + ) % ( + sender_attendee.get_participant_status(True), + reference_uid + ) + ) else: log.info( _("Event referenced by this REPLY (%r) not found in resource calendar") % ( @@ -387,8 +396,8 @@ ) # pylint: disable=broad-except - except Exception as e: - log.error(_("Could not find envelope attendee: %r") % (e)) + except Exception as errmsg: + log.error(_("Could not find envelope attendee: %r") % (errmsg)) continue # ignore updates and cancellations to resource collections who already delegated the event @@ -484,7 +493,7 @@ # This must have been a resource collection originally. # We have inserted the reference to the original resource # record in 'memberof'. 
- if available_resource.has_key('memberof'): + if 'memberof' in available_resource: original_resource = resources[available_resource['memberof']] atts = [a.get_email() for a in itip_event['xml'].get_attendees()] @@ -591,14 +600,19 @@ for resource_dn in resource_dns: resource_attrs = auth.get_entry_attributes(None, resource_dn, ['kolabtargetfolder']) - if resource_attrs.has_key('kolabtargetfolder'): + + if 'kolabtargetfolder' in resource_attrs: try: expunge_resource_calendar(resource_attrs['kolabtargetfolder']) # pylint: disable=broad-except - except Exception as e: - log.error(_("Expunge resource calendar for %s (%s) failed: %r") % ( - resource_dn, resource_attrs['kolabtargetfolder'], e - )) + except Exception as errmsg: + log.error( + _("Expunge resource calendar for %s (%s) failed: %r") % ( + resource_dn, + resource_attrs['kolabtargetfolder'], + errmsg + ) + ) imap.disconnect() @@ -606,6 +620,7 @@ heartbeat._lastrun = now + heartbeat._lastrun = 0 @@ -626,7 +641,13 @@ # might raise an exception, let that bubble targetfolder = imap.folder_quote(mailbox) - imap.set_acl(targetfolder, conf.get(conf.get('kolab', 'imap_backend'), 'admin_login'), "lrswipkxtecda") + + imap.set_acl( + targetfolder, + conf.get(conf.get('kolab', 'imap_backend'), 'admin_login'), + "lrswipkxtecda" + ) + imap.imap.m.select(targetfolder) typ, data = imap.imap.m.search(None, 'UNDELETED') @@ -642,8 +663,8 @@ try: event = event_from_message(message_from_string(data[0][1])) # pylint: disable=broad-except - except Exception as e: - log.error(_("Failed to parse event from message %s/%s: %r") % (mailbox, num, e)) + except Exception as errmsg: + log.error(_("Failed to parse event from message %s/%s: %r") % (mailbox, num, errmsg)) continue if event: @@ -658,7 +679,17 @@ if dt_end and dt_end < expire_date: age = now - dt_end - log.debug(_("Flag event %s from message %s/%s as deleted (age = %d days)") % (event.uid, mailbox, num, age.days), level=8) + + log.debug( + _("Flag event %s from message %s/%s as deleted (age = %d days)") % ( + event.uid, + mailbox, + num, + age.days + ), + level=8 + ) + imap.imap.m.store(num, '+FLAGS', '\\Deleted') imap.imap.m.expunge() @@ -676,7 +707,7 @@ for resource in resources.keys(): # skip this for resource collections - if not resources[resource].has_key('kolabtargetfolder'): + if 'kolabtargetfolder' not in resources[resource]: continue # sets the 'conflicting' flag and adds a list of conflicting events found @@ -688,28 +719,42 @@ end = time.time() - log.debug(_("start: %r, end: %r, total: %r, messages: %d") % (start, end, (end-start), num_messages), level=8) - + log.debug( + _("start: %r, end: %r, total: %r, messages: %d") % ( + start, + end, + (end - start), + num_messages + ), + level=8 + ) # For each resource (collections are first!) 
# check conflicts and either accept or decline the reservation request for resource in resource_dns: log.debug(_("Polling for resource %r") % (resource), level=8) - if not resources.has_key(resource): + if resource not in resources: log.debug(_("Resource %r has been popped from the list") % (resource), level=8) continue - if not resources[resource].has_key('conflicting_events'): + if 'conflicting_events' not in resources[resource]: log.debug(_("Resource is a collection"), level=8) # check if there are non-conflicting collection members - conflicting_members = [x for x in resources[resource]['uniquemember'] if resources[x]['conflict']] + conflicting_members = [ + x for x in resources[resource]['uniquemember'] + if resources[x]['conflict'] + ] # found at least one non-conflicting member, remove the conflicting ones and continue if len(conflicting_members) < len(resources[resource]['uniquemember']): for member in conflicting_members: - resources[resource]['uniquemember'] = [x for x in resources[resource]['uniquemember'] if x != member] + resources[resource]['uniquemember'] = [ + x for x in resources[resource]['uniquemember'] + if x != member + ] + del resources[member] log.debug(_("Removed conflicting resources from %r: (%r) => %r") % ( @@ -724,7 +769,13 @@ continue if len(resources[resource]['conflicting_events']) > 0: - log.debug(_("Conflicting events: %r for resource %r") % (resources[resource]['conflicting_events'], resource), level=8) + log.debug( + _("Conflicting events: %r for resource %r") % ( + resources[resource]['conflicting_events'], + resource + ), + level=8 + ) done = False @@ -734,24 +785,41 @@ if itip_event['recurrence-id'] is not None: continue + _eas = [a.get_email() for a in itip_event['xml'].get_attendees()] # Now we have the event that was conflicting - if resources[resource]['mail'] in [a.get_email() for a in itip_event['xml'].get_attendees()]: + if resources[resource]['mail'] in _eas: # this resource initially was delegated from a collection ? 
- if receiving_attendee and receiving_attendee.get_email() == resources[resource]['mail'] \ + if receiving_attendee \ + and receiving_attendee.get_email() == resources[resource]['mail'] \ and len(receiving_attendee.get_delegated_from()) > 0: + for delegator in receiving_attendee.get_delegated_from(): collection_data = get_resource_collection(delegator.email()) if collection_data is not None: # check if another collection member is available - (available_resource, dummy) = check_availability(itip_events, collection_data[0], collection_data[1]) + (available_resource, dummy) = check_availability( + itip_events, + collection_data[0], + collection_data[1] + ) + break if available_resource is not None: - log.debug(_("Delegate to another resource collection member: %r to %r") % \ - (resources[resource]['mail'], available_resource['mail']), level=8) + log.debug( + _("Delegate to another resource collection member: %r to %r") % ( + resources[resource]['mail'], + available_resource['mail'] + ), + level=8 + ) # set this new resource as delegate for the receiving_attendee - itip_event['xml'].delegate(resources[resource]['mail'], available_resource['mail'], available_resource['cn']) + itip_event['xml'].delegate( + resources[resource]['mail'], + available_resource['mail'], + available_resource['cn'] + ) # set delegator to NON-PARTICIPANT and RSVP=FALSE receiving_attendee.set_role(kolabformat.NonParticipant) @@ -761,7 +829,11 @@ # remove existing_events as we now delegated back to the collection if len(resources[resource]['existing_events']) > 0: for existing in resources[resource]['existing_events']: - delete_resource_event(existing.uid, resources[resource], existing._msguid) + delete_resource_event( + existing.uid, + resources[resource], + existing._msguid + ) done = True @@ -772,7 +844,8 @@ # No conflicts, go accept for itip_event in itip_events: # directly invited resource - if resources[resource]['mail'] in [a.get_email() for a in itip_event['xml'].get_attendees()]: + _eas = [a.get_email() for a in itip_event['xml'].get_attendees()] + if resources[resource]['mail'] in _eas: available_resource = resources[resource] done = True @@ -780,11 +853,14 @@ # This must have been a resource collection originally. # We have inserted the reference to the original resource # record in 'memberof'. - if resources[resource].has_key('memberof'): + if 'memberof' in resources[resource]: original_resource = resources[resources[resource]['memberof']] # Randomly select a target resource from the resource collection. 
- available_resource = resources[original_resource['uniquemember'][random.randint(0,(len(original_resource['uniquemember'])-1))]] + _selected = random.randint(0, (len(original_resource['uniquemember']) - 1)) + + available_resource = resources[original_resource['uniquemember'][_selected]] + done = True if done: @@ -889,8 +965,8 @@ imap.imap.m.select(imap.folder_quote(mailbox)) typ, data = imap.imap.m.search(None, '(UNDELETED HEADER SUBJECT "%s")' % (uid)) # pylint: disable=broad-except - except Exception as e: - log.error(_("Failed to access resource calendar:: %r") % (e)) + except Exception as errmsg: + log.error(_("Failed to access resource calendar:: %r") % (errmsg)) return event for num in reversed(data[0].split()): @@ -899,7 +975,7 @@ try: msguid = re.search(r"\WUID (\d+)", data[0][0]).group(1) # pylint: disable=broad-except - except Exception as e: + except Exception: log.error(_("No UID found in IMAP response: %r") % (data[0][0])) continue @@ -917,18 +993,25 @@ return (event, master) # compare recurrence-id and skip to next message if not matching - elif recurrence_id and not xmlutils.dates_equal(recurrence_id, event.get_recurrence_id()): - log.debug(_("Recurrence-ID not matching on message %s, skipping: %r != %r") % ( - msguid, recurrence_id, event.get_recurrence_id() - ), level=8) - continue + elif recurrence_id: + if not xmlutils.dates_equal(recurrence_id, event.get_recurrence_id()): + log.debug( + _("Recurrence-ID not matching on message %s, skipping: %r != %r") % ( + msguid, + recurrence_id, + event.get_recurrence_id() + ), + level=8 + ) + + continue if event is not None: setattr(event, '_msguid', msguid) # pylint: disable=broad-except - except Exception as e: - log.error(_("Failed to parse event from message %s/%s: %r") % (mailbox, num, e)) + except Exception as errmsg: + log.error(_("Failed to parse event from message %s/%s: %r") % (mailbox, num, errmsg)) event = None master = None continue @@ -939,7 +1022,13 @@ return (event, master) -def accept_reservation_request(itip_event, resource, delegator=None, confirmed=False, invitationpolicy=None): +def accept_reservation_request( + itip_event, + resource, + delegator=None, + confirmed=False, + invitationpolicy=None +): """ Accepts the given iTip event by booking it into the resource's calendar. 
Then set the attendee status of the given resource to @@ -961,7 +1050,7 @@ partstat = 'TENTATIVE' if confirmation_required else 'ACCEPTED' - itip_event['xml'].set_transparency(False); + itip_event['xml'].set_transparency(False) itip_event['xml'].set_attendee_participant_status( itip_event['xml'].get_attendee_by_email(resource['mail']), partstat @@ -1033,7 +1122,7 @@ save_event.add_exception(itip_event['xml']) # remove old copy of the reservation (also sets ACLs) - if resource.has_key('existing_events') and len(resource['existing_events']) > 0: + if 'existing_events' in resource and len(resource['existing_events']) > 0: for existing in resource['existing_events']: delete_resource_event(existing.uid, resource, existing._msguid) @@ -1042,7 +1131,11 @@ delete_resource_event(save_event.uid, resource, save_event._msguid) else: - imap.set_acl(targetfolder, conf.get(conf.get('kolab', 'imap_backend'), 'admin_login'), "lrswipkxtecda") + imap.set_acl( + targetfolder, + conf.get(conf.get('kolab', 'imap_backend'), 'admin_login'), + "lrswipkxtecda" + ) # append new version result = imap.imap.m.append( @@ -1069,7 +1162,12 @@ targetfolder = imap.folder_quote(resource['kolabtargetfolder']) try: - imap.set_acl(targetfolder, conf.get(conf.get('kolab', 'imap_backend'), 'admin_login'), "lrswipkxtecda") + imap.set_acl( + targetfolder, + conf.get(conf.get('kolab', 'imap_backend'), 'admin_login'), + "lrswipkxtecda" + ) + imap.imap.m.select(targetfolder) # delete by IMAP UID @@ -1103,14 +1201,14 @@ def reject(filepath): new_filepath = os.path.join( - mybasepath, - 'REJECT', - os.path.basename(filepath) - ) + mybasepath, + 'REJECT', + os.path.basename(filepath) + ) os.rename(filepath, new_filepath) filepath = new_filepath - exec('modules.cb_action_REJECT(%r, %r)' % ('resources',filepath)) + exec('modules.cb_action_REJECT(%r, %r)' % ('resources', filepath)) def resource_record_from_email_address(email_address): @@ -1127,7 +1225,7 @@ local_domains = auth.list_domains() - if not local_domains == None: + if local_domains is not None: local_domains = list(set(local_domains.keys())) if not email_address.split('@')[1] in local_domains: @@ -1146,8 +1244,8 @@ else: log.debug(_("No resource (collection) records found for %r") % (email_address), level=8) - elif isinstance(resource_records, basestring): - resource_records = [ resource_records ] + elif isinstance(resource_records, string_types): + resource_records = [resource_records] log.debug(_("Resource record: %r") % (resource_records), level=8) return resource_records @@ -1168,10 +1266,23 @@ log.debug(_("Raw itip_events: %r") % (itip_events), level=8) attendees_raw = [] - for list_attendees_raw in [x for x in [y['attendees'] for y in itip_events if y.has_key('attendees') and isinstance(y['attendees'], list)]]: + + _lars = [ + x for x in [ + y['attendees'] for y in itip_events + if 'attendees' in y and isinstance(y['attendees'], list) + ] + ] + + for list_attendees_raw in _lars: attendees_raw.extend(list_attendees_raw) - for list_attendees_raw in [y['attendees'] for y in itip_events if y.has_key('attendees') and isinstance(y['attendees'], basestring)]: + _lars = [ + y['attendees'] for y in itip_events + if 'attendees' in y and isinstance(y['attendees'], string_types) + ] + + for list_attendees_raw in _lars: attendees_raw.append(list_attendees_raw) log.debug(_("Raw set of attendees: %r") % (attendees_raw), level=8) @@ -1179,17 +1290,21 @@ # TODO: Resources are actually not implemented in the format. We reset this # list later. 
resources_raw = [] - for list_resources_raw in [x for x in [y['resources'] for y in itip_events if y.has_key('resources')]]: + _lrrs = [x for x in [y['resources'] for y in itip_events if 'resource' in y]] + + for list_resources_raw in _lrrs: resources_raw.extend(list_resources_raw) log.debug(_("Raw set of resources: %r") % (resources_raw), level=8) # consider organizer (in REPLY messages), too - organizers_raw = [re.sub('\+[A-Za-z0-9=/-]+@', '@', str(y['organizer'])) for y in itip_events if y.has_key('organizer')] + organizers_raw = [ + re.sub(r'\+[A-Za-z0-9=/-]+@', '@', str(y['organizer'])) for y in itip_events + if 'organizer' in y + ] log.debug(_("Raw set of organizers: %r") % (organizers_raw), level=8) - # TODO: We expect the format of an attendee line to literally be: # # ATTENDEE:RSVP=TRUE;ROLE=REQ-PARTICIPANT;MAILTO:lydia.bossers@kolabsys.com @@ -1218,7 +1333,7 @@ else: log.debug(_("No resource (collection) records found for %r") % (attendee), level=8) - elif isinstance(_resource_records, basestring): + elif isinstance(_resource_records, string_types): resource_records.append(_resource_records) log.debug(_("Resource record: %r") % (_resource_records), level=8) @@ -1242,7 +1357,10 @@ resources = [a for a in resources if a == recipient_email] for resource in resources: - log.debug(_("Checking if resource %r is a resource (collection)") % (resource), level=8) + log.debug( + _("Checking if resource %r is a resource (collection)") % (resource), + level=8 + ) _resource_records = auth.find_resource(resource) if isinstance(_resource_records, list): @@ -1251,17 +1369,21 @@ log.debug(_("Resource record(s): %r") % (_resource_records), level=8) else: - log.debug(_("No resource (collection) records found for %r") % (resource), level=8) + log.debug( + _("No resource (collection) records found for %r") % (resource), + level=8 + ) - elif isinstance(_resource_records, basestring): + elif isinstance(_resource_records, string_types): resource_records.append(_resource_records) log.debug(_("Resource record: %r") % (_resource_records), level=8) else: log.warning(_("Resource reservation made but no resource records found")) - - log.debug(_("The following resources are being referred to in the " + \ - "iTip: %r") % (resource_records), level=8) + log.debug( + _("The following resources are being referred to in the iTip: %r") % (resource_records), + level=8 + ) return resource_records @@ -1282,17 +1404,17 @@ resource_attrs['dn'] = resource_dn parse_kolabinvitationpolicy(resource_attrs) - if not 'kolabsharedfolder' in [x.lower() for x in resource_attrs['objectclass']]: - if resource_attrs.has_key('uniquemember'): + if 'kolabsharedfolder' not in [x.lower() for x in resource_attrs['objectclass']]: + if 'uniquemember' in resource_attrs: if not isinstance(resource_attrs['uniquemember'], list): - resource_attrs['uniquemember'] = [ resource_attrs['uniquemember'] ] + resource_attrs['uniquemember'] = [resource_attrs['uniquemember']] resources[resource_dn] = resource_attrs for uniquemember in resource_attrs['uniquemember']: member_attrs = auth.get_entry_attributes( - None, - uniquemember, - ['*'] - ) + None, + uniquemember, + ['*'] + ) if 'kolabsharedfolder' in [x.lower() for x in member_attrs['objectclass']]: member_attrs['dn'] = uniquemember @@ -1300,8 +1422,10 @@ resources[uniquemember] = member_attrs resources[uniquemember]['memberof'] = resource_dn - if not member_attrs.has_key('owner') and resources[resource_dn].has_key('owner'): + + if 'owner' not in member_attrs and 'owner' in resources[resource_dn]: 
resources[uniquemember]['owner'] = resources[resource_dn]['owner'] + resource_dns.append(uniquemember) else: resources[resource_dn] = resource_attrs @@ -1310,23 +1434,26 @@ def parse_kolabinvitationpolicy(attrs, parent=None): - if attrs.has_key('kolabinvitationpolicy'): + if 'kolabinvitationpolicy' in attrs: if not isinstance(attrs['kolabinvitationpolicy'], list): attrs['kolabinvitationpolicy'] = [attrs['kolabinvitationpolicy']] - attrs['kolabinvitationpolicy'] = [policy_name_map[p] for p in attrs['kolabinvitationpolicy'] if policy_name_map.has_key(p)] - elif isinstance(parent, dict) and parent.has_key('kolabinvitationpolicy'): + attrs['kolabinvitationpolicy'] = [ + policy_name_map[p] for p in attrs['kolabinvitationpolicy'] if p in policy_name_map + ] + + elif isinstance(parent, dict) and 'kolabinvitationpolicy' in parent: attrs['kolabinvitationpolicy'] = parent['kolabinvitationpolicy'] def get_resource_collection(email_address): """ - + Obtain a resource collection object from an email address. """ resource_dns = resource_record_from_email_address(email_address) if len(resource_dns) == 1: resource_attrs = auth.get_entry_attributes(None, resource_dns[0], ['objectclass']) - if not 'kolabsharedfolder' in [x.lower() for x in resource_attrs['objectclass']]: + if 'kolabsharedfolder' not in [x.lower() for x in resource_attrs['objectclass']]: resources = get_resource_records(resource_dns) return (resource_dns, resources) @@ -1345,9 +1472,9 @@ owners = [] - if resource.has_key('owner'): + if 'owner' in resource: if not isinstance(resource['owner'], list): - owners = [ resource['owner'] ] + owners = [resource['owner']] else: owners = resource['owner'] @@ -1355,16 +1482,16 @@ # get owner attribute from collection collections = auth.search_entry_by_attribute('uniquemember', resource['dn']) if not isinstance(collections, list): - collections = [ collections ] + collections = [collections] - for dn,collection in collections: - if collection.has_key('owner') and isinstance(collection['owner'], list): + for dn, collection in collections: + if 'owner' in collection and isinstance(collection['owner'], list): owners += collection['owner'] - elif collection.has_key('owner'): + elif 'owner' in collection: owners.append(collection['owner']) for dn in owners: - owner = auth.get_entry_attributes(None, dn, ['cn','mail','telephoneNumber']) + owner = auth.get_entry_attributes(None, dn, ['cn', 'mail', 'telephoneNumber']) if owner is not None: return owner @@ -1377,7 +1504,7 @@ """ global auth - if not resource.has_key('kolabinvitationpolicy') or resource['kolabinvitationpolicy'] is None: + if 'kolabinvitationpolicy' not in resource or resource['kolabinvitationpolicy'] is None: if not auth: auth = Auth() auth.connect() @@ -1385,21 +1512,24 @@ # get kolabinvitationpolicy attribute from collection collections = auth.search_entry_by_attribute('uniquemember', resource['dn']) if not isinstance(collections, list): - collections = [ (collections['dn'],collections) ] + collections = [(collections['dn'], collections)] - log.debug(_("Check collections %r for kolabinvitationpolicy attributes") % (collections), level=8) + log.debug( + _("Check collections %r for kolabinvitationpolicy attributes") % (collections), + level=8 + ) - for dn,collection in collections: + for dn, collection in collections: # ldap.search_entry_by_attribute() doesn't return the attributes lower-cased - if collection.has_key('kolabInvitationPolicy'): + if 'kolabInvitationPolicy' in collection: collection['kolabinvitationpolicy'] = 
collection['kolabInvitationPolicy'] - if collection.has_key('kolabinvitationpolicy'): + if 'kolabinvitationpolicy' in collection: parse_kolabinvitationpolicy(collection) resource['kolabinvitationpolicy'] = collection['kolabinvitationpolicy'] break - return resource['kolabinvitationpolicy'] if resource.has_key('kolabinvitationpolicy') else None + return resource['kolabinvitationpolicy'] if 'kolabinvitationpolicy' in resource else None def send_response(from_address, itip_events, owner=None): @@ -1410,7 +1540,7 @@ """ if isinstance(itip_events, dict): - itip_events = [ itip_events ] + itip_events = [itip_events] for itip_event in itip_events: attendee = itip_event['xml'].get_attendee_by_email(from_address) @@ -1423,7 +1553,10 @@ # Extra actions to take: send delegated reply if participant_status == "DELEGATED": - delegatee = [a for a in itip_event['xml'].get_attendees() if from_address in a.get_delegated_from(True)][0] + delegatee = [ + a for a in itip_event['xml'].get_attendees() + if from_address in a.get_delegated_from(True) + ][0] delegated_message_text = _(""" *** This is an automated response, please do not reply! *** @@ -1431,31 +1564,45 @@ Your reservation was delegated to "%s" which is available for the requested time. """) % (delegatee.get_name()) - pykolab.itip.send_reply(from_address, itip_event, delegated_message_text, - subject=subject_template) + pykolab.itip.send_reply( + from_address, + itip_event, + delegated_message_text, + subject=subject_template + ) # adjust some vars for the regular reply from the delegatee message_text = reservation_response_text(delegatee.get_participant_status(True), owner) from_address = delegatee.get_email() time.sleep(2) - pykolab.itip.send_reply(from_address, itip_event, message_text, - subject=subject_template) + pykolab.itip.send_reply( + from_address, + itip_event, + message_text, + subject=subject_template + ) def reservation_response_text(status, owner): message_text = _(""" *** This is an automated response, please do not reply! *** - + We hereby inform you that your reservation was %s. """) % (participant_status_label(status)) if owner: - message_text += _(""" - If you have questions about this reservation, please contact - %s <%s> %s - """) % (owner['cn'], owner['mail'], owner['telephoneNumber'] if owner.has_key('telephoneNumber') else '') - + message_text += _( + """ + If you have questions about this reservation, please contact + %s <%s> %s + """ + ) % ( + owner['cn'], + owner['mail'], + owner['telephoneNumber'] if 'telephoneNumber' in owner else '' + ) + return message_text @@ -1492,7 +1639,7 @@ ) # change gettext language to the preferredlanguage setting of the resource owner - if owner.has_key('preferredlanguage'): + if 'preferredlanguage' in owner: pykolab.translate.setUserLanguage(owner['preferredlanguage']) message_text = owner_notification_text(resource, owner, itip_event['xml'], success) @@ -1515,23 +1662,32 @@ log.debug(_("Owner notification was sent successfully: %r") % result, level=8) signal.alarm(0) + def owner_notification_text(resource, owner, event, success): organizer = event.get_organizer() status = event.get_attendee_by_email(resource['mail']).get_participant_status(True) if success: - message_text = _(""" - The resource booking for %(resource)s by %(orgname)s <%(orgemail)s> has been %(status)s for %(date)s. + message_text = _( + """ + The resource booking for %(resource)s by %(orgname)s <%(orgemail)s> has been + %(status)s for %(date)s. - *** This is an automated message, sent to you as the resource owner. 
*** - """) + *** This is an automated message, sent to you as the resource owner. *** + """ + ) else: - message_text = _(""" - A reservation request for %(resource)s could not be processed automatically. - Please contact %(orgname)s <%(orgemail)s> who requested this resource for %(date)s. Subject: %(summary)s. + message_text = _( + """ + A reservation request for %(resource)s could not be processed automatically. + + Please contact %(orgname)s <%(orgemail)s> who requested this resource for %(date)s. + + Subject for the event: %(summary)s. - *** This is an automated message, sent to you as the resource owner. *** - """) + *** This is an automated message, sent to you as the resource owner. *** + """ + ) return message_text % { 'resource': resource['cn'], @@ -1545,16 +1701,20 @@ def send_owner_confirmation(resource, owner, itip_event): """ - Send a reservation request to the resource owner for manual confirmation (ACCEPT or DECLINE) + Send a reservation request to the resource owner for manual confirmation (ACCEPT or + DECLINE). - This clones the given invtation with a new UID and setting the resource as organizer in order to - receive the reply from the owner. + This clones the given invtation with a new UID and setting the resource as organizer in + order to receive the reply from the owner. """ uid = itip_event['uid'] event = itip_event['xml'] organizer = event.get_organizer() - event_attendees = [a.get_displayname() for a in event.get_attendees() if not a.get_cutype() == kolabformat.CutypeResource] + event_attendees = [ + a.get_displayname() for a in event.get_attendees() + if not a.get_cutype() == kolabformat.CutypeResource + ] log.debug( _("Clone invitation for owner confirmation: %r from %r") % ( @@ -1571,23 +1731,31 @@ # add resource owner as (the sole) attendee event._attendees = [] - event.add_attendee(owner['mail'], owner['cn'], rsvp=True, role=kolabformat.Required, participant_status=kolabformat.PartNeedsAction) + event.add_attendee( + owner['mail'], + owner['cn'], + rsvp=True, + role=kolabformat.Required, + participant_status=kolabformat.PartNeedsAction + ) # flag this iTip message as confirmation type event.add_custom_property('X-Kolab-InvitationType', 'CONFIRMATION') - message_text = _(""" - A reservation request for %(resource)s requires your approval! - Please either accept or decline this invitation without saving it to your calendar. + message_text = _( + """ + A reservation request for %(resource)s requires your approval! + Please either accept or decline this invitation without saving it to your calendar. - The reservation request was sent from %(orgname)s <%(orgemail)s>. + The reservation request was sent from %(orgname)s <%(orgemail)s>. - Subject: %(summary)s. - Date: %(date)s - Participants: %(attendees)s + Subject: %(summary)s. + Date: %(date)s + Participants: %(attendees)s - *** This is an automated message, please don't reply by email. *** - """)% { + *** This is an automated message, please don't reply by email. *** + """ + ) % { 'resource': resource['cn'], 'orgname': organizer.name(), 'orgemail': organizer.email(), @@ -1596,8 +1764,10 @@ 'attendees': ",\n+ ".join(event_attendees) } - pykolab.itip.send_request(owner['mail'], itip_event, message_text, + pykolab.itip.send_request( + owner['mail'], + itip_event, + message_text, subject=_('Booking request for %s requires confirmation') % (resource['cn']), - direct=True) - - + direct=True + )
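Most of the changes in this file swap Python-2-only idioms (basestring, dict.has_key()) for forms that also run under Python 3, alongside whitespace and line-length cleanups. A minimal sketch of the compatibility pattern, assuming string_types comes from the six library (the import itself is not visible in this hunk) and using a hypothetical lookup result:

    from six import string_types   # assumed origin of string_types

    def normalise_records(result):
        # Accept either a single record (str) or a list of records,
        # mirroring the isinstance(..., string_types) checks in the diff.
        if isinstance(result, string_types):
            return [result]
        if isinstance(result, list):
            return result
        return []

    attrs = {'owner': 'uid=doe,ou=People,dc=example,dc=org'}  # hypothetical entry
    if 'owner' in attrs:            # replaces attrs.has_key('owner')
        owners = normalise_records(attrs['owner'])

The same "key in dict" and string_types pattern recurs throughout the hunks above; only the call sites differ.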
View file
pykolab-0.8.15.tar.gz/wallace/modules.py -> pykolab-0.8.16.tar.gz/wallace/modules.py
Changed
@@ -51,7 +51,7 @@ modules = {} -def __init__(): +def initialize(): # We only want the base path modules_base_path = os.path.dirname(__file__)
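Renaming the module-level __init__() to initialize() stops the loader from reusing a name reserved for package initialisation, so callers now have to run the setup step explicitly. A minimal sketch of the assumed calling convention (the real call sites are not part of this diff, and the registry key is hypothetical):

    # hypothetical caller, e.g. the wallace dispatcher
    from wallace import modules

    modules.initialize()                        # was: modules.__init__()
    handler = modules.modules.get('resources')  # registry dict shown above, populated by initialize()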
View file
pykolab.dsc
Changed
@@ -2,7 +2,7 @@ Source: pykolab Binary: pykolab, kolab-cli, kolab-conf, kolab-saslauthd, kolab-server, kolab-telemetry, kolab-xml, wallace Architecture: all -Version: 0.8.15-0~kolab1 +Version: 0.8.15-0~kolab2 Maintainer: Jeroen van Meeuwen (Kolab Systems) <vanmeeuwen@kolabsys.com> Uploaders: Paul Klos <kolab@klos2day.nl> Homepage: http://www.kolab.org