Changeset - 609e06b6c52f
[Not reviewed]
default
Mads Kiilerich - 12 years ago 2013-12-10 19:30:37
madski@unity3d.com
mail: only bother admins when that really is the intention - not just when there happen to be no other recipients
2 files changed with 13 insertions and 6 deletions:
0 comments (0 inline, 0 general)
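As a rough illustration (not code from this changeset) of what the new send_email behaviour amounts to: None falls back to the configured email_to address plus all admins, an empty list is now treated as an error, and any other list is used as given. The helper name and arguments below are made up for the sketch.

    def resolve_recipients(recipients, email_to, admin_emails):
        # Sketch of send_email()'s recipient handling after this change.
        if recipients is None:
            # caller deliberately left recipients undefined: email_to plus every admin
            return [email_to] + admin_emails
        if not recipients:
            # an empty list no longer goes to the admins; the task logs an error and gives up
            return None
        # a non-empty list is used exactly as given
        return recipients

    # resolve_recipients(None, 'kallithea@example.com', ['admin@example.com'])
    #   -> ['kallithea@example.com', 'admin@example.com']
    # resolve_recipients([], 'kallithea@example.com', ['admin@example.com'])
    #   -> None (previously this case was also sent to the admins)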
kallithea/lib/celerylib/tasks.py
 
@@ -72,402 +72,407 @@ def get_logger(cls):
 
        log = logging.getLogger(__name__)
 

	
 
    return log
 

	
 

	
 
@task(ignore_result=True)
 
@locked_task
 
@dbsession
 
def whoosh_index(repo_location, full_index):
 
    from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
 
    log = get_logger(whoosh_index)
 
    DBS = get_session()
 

	
 
    index_location = config['index_dir']
 
    WhooshIndexingDaemon(index_location=index_location,
 
                         repo_location=repo_location, sa=DBS)\
 
                         .run(full_index=full_index)
 

	
 

	
 
@task(ignore_result=True)
 
@dbsession
 
def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
 
    log = get_logger(get_commits_stats)
 
    DBS = get_session()
 
    lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
 
                            ts_max_y)
 
    lockkey_path = config['app_conf']['cache_dir']
 

	
 
    log.info('running task with lockkey %s' % lockkey)
 

	
 
    try:
 
        lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))
 

	
 
        # for js data compatibility cleans the key for person from '
 
        akc = lambda k: person(k).replace('"', "")
 

	
 
        co_day_auth_aggr = {}
 
        commits_by_day_aggregate = {}
 
        repo = Repository.get_by_repo_name(repo_name)
 
        if repo is None:
 
            return True
 

	
 
        repo = repo.scm_instance
 
        repo_size = repo.count()
 
        # return if repo has no revisions
 
        if repo_size < 1:
 
            lock.release()
 
            return True
 

	
 
        skip_date_limit = True
 
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
 
        last_rev = None
 
        last_cs = None
 
        timegetter = itemgetter('time')
 

	
 
        dbrepo = DBS.query(Repository)\
 
            .filter(Repository.repo_name == repo_name).scalar()
 
        cur_stats = DBS.query(Statistics)\
 
            .filter(Statistics.repository == dbrepo).scalar()
 

	
 
        if cur_stats is not None:
 
            last_rev = cur_stats.stat_on_revision
 

	
 
        if last_rev == repo.get_changeset().revision and repo_size > 1:
 
            # pass silently without any work if we're not on first revision or
 
            # current state of parsing revision(from db marker) is the
 
            # last revision
 
            lock.release()
 
            return True
 

	
 
        if cur_stats:
 
            commits_by_day_aggregate = OrderedDict(json.loads(
 
                                        cur_stats.commit_activity_combined))
 
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)
 

	
 
        log.debug('starting parsing %s' % parse_limit)
 
        lmktime = mktime
 

	
 
        last_rev = last_rev + 1 if last_rev >= 0 else 0
 
        log.debug('Getting revisions from %s to %s' % (
 
             last_rev, last_rev + parse_limit)
 
        )
 
        for cs in repo[last_rev:last_rev + parse_limit]:
 
            log.debug('parsing %s' % cs)
 
            last_cs = cs  # remember last parsed changeset
 
            k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
 
                          cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])
 

	
 
            if akc(cs.author) in co_day_auth_aggr:
 
                try:
 
                    l = [timegetter(x) for x in
 
                         co_day_auth_aggr[akc(cs.author)]['data']]
 
                    time_pos = l.index(k)
 
                except ValueError:
 
                    time_pos = None
 

	
 
                if time_pos >= 0 and time_pos is not None:
 

	
 
                    datadict = \
 
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]
 

	
 
                    datadict["commits"] += 1
 
                    datadict["added"] += len(cs.added)
 
                    datadict["changed"] += len(cs.changed)
 
                    datadict["removed"] += len(cs.removed)
 

	
 
                else:
 
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
 

	
 
                        datadict = {"time": k,
 
                                    "commits": 1,
 
                                    "added": len(cs.added),
 
                                    "changed": len(cs.changed),
 
                                    "removed": len(cs.removed),
 
                                   }
 
                        co_day_auth_aggr[akc(cs.author)]['data']\
 
                            .append(datadict)
 

	
 
            else:
 
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
 
                    co_day_auth_aggr[akc(cs.author)] = {
 
                                        "label": akc(cs.author),
 
                                        "data": [{"time":k,
 
                                                 "commits":1,
 
                                                 "added":len(cs.added),
 
                                                 "changed":len(cs.changed),
 
                                                 "removed":len(cs.removed),
 
                                                 }],
 
                                        "schema": ["commits"],
 
                                        }
 

	
 
            #gather all data by day
 
            if k in commits_by_day_aggregate:
 
                commits_by_day_aggregate[k] += 1
 
            else:
 
                commits_by_day_aggregate[k] = 1
 

	
 
        overview_data = sorted(commits_by_day_aggregate.items(),
 
                               key=itemgetter(0))
 

	
 
        if not co_day_auth_aggr:
 
            co_day_auth_aggr[akc(repo.contact)] = {
 
                "label": akc(repo.contact),
 
                "data": [0, 1],
 
                "schema": ["commits"],
 
            }
 

	
 
        stats = cur_stats if cur_stats else Statistics()
 
        stats.commit_activity = json.dumps(co_day_auth_aggr)
 
        stats.commit_activity_combined = json.dumps(overview_data)
 

	
 
        log.debug('last revision %s' % last_rev)
 
        leftovers = len(repo.revisions[last_rev:])
 
        log.debug('revisions to parse %s' % leftovers)
 

	
 
        if last_rev == 0 or leftovers < parse_limit:
 
            log.debug('getting code trending stats')
 
            stats.languages = json.dumps(__get_codes_stats(repo_name))
 

	
 
        try:
 
            stats.repository = dbrepo
 
            stats.stat_on_revision = last_cs.revision if last_cs else 0
 
            DBS.add(stats)
 
            DBS.commit()
 
        except:
 
            log.error(traceback.format_exc())
 
            DBS.rollback()
 
            lock.release()
 
            return False
 

	
 
        # final release
 
        lock.release()
 

	
 
        # execute another task if celery is enabled
 
        if len(repo.revisions) > 1 and CELERY_ON and recurse_limit > 0:
 
            recurse_limit -= 1
 
            run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y,
 
                     recurse_limit)
 
        if recurse_limit <= 0:
 
            log.debug('Breaking recursive mode due to reach of recurse limit')
 
        return True
 
    except LockHeld:
 
        log.info('LockHeld')
 
        return 'Task with key %s already running' % lockkey
 

	
 

	
 
@task(ignore_result=True)
 
@dbsession
 
def send_email(recipients, subject, body='', html_body=''):
 
    """
 
    Sends an email with defined parameters from the .ini files.
 

	
 
    :param recipients: list of recipients, it this is empty the defined email
 
        address from field 'email_to' is used instead
 
    :param recipients: list of recipients, if this is None, the defined email
 
        address from field 'email_to' and all admins is used instead
 
    :param subject: subject of the mail
 
    :param body: body of the mail
 
    :param html_body: html version of body
 
    """
 
    log = get_logger(send_email)
 
    DBS = get_session()
 
    assert isinstance(recipients, list), recipients
 

	
 
    email_config = config
 
    subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
 
    if not recipients:
 
    email_prefix = email_config.get('email_prefix', '')
 
    if email_prefix:
 
        subject = "%s %s" % (email_prefix, subject)
 
    if recipients is None:
 
        # if recipients are not defined we send to email_config + all admins
 
        admins = [u.email for u in User.query()
 
                  .filter(User.admin == True).all()]
 
        recipients = [email_config.get('email_to')] + admins
 
        log.warning("recipients not specified for '%s' - sending to admins %s", subject, ' '.join(recipients))
 
    elif not recipients:
 
        log.error("No recipients specified")
 
        return False
 

	
 
    mail_from = email_config.get('app_email_from', 'Kallithea')
 
    user = email_config.get('smtp_username')
 
    passwd = email_config.get('smtp_password')
 
    mail_server = email_config.get('smtp_server')
 
    mail_port = email_config.get('smtp_port')
 
    tls = str2bool(email_config.get('smtp_use_tls'))
 
    ssl = str2bool(email_config.get('smtp_use_ssl'))
 
    debug = str2bool(email_config.get('debug'))
 
    smtp_auth = email_config.get('smtp_auth')
 

	
 
    if not mail_server:
 
        log.error("SMTP mail server not configured - cannot send mail '%s' to %s", subject, ' '.join(recipients))
 
        log.warning("body:\n%s", body)
 
        log.warning("html:\n%s", html_body)
 
        return False
 

	
 
    try:
 
        m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth,
 
                       mail_port, ssl, tls, debug=debug)
 
        m.send(recipients, subject, body, html_body)
 
    except:
 
        log.error('Mail sending failed')
 
        log.error(traceback.format_exc())
 
        return False
 
    return True
 

	
 
@task(ignore_result=False)
 
@dbsession
 
def create_repo(form_data, cur_user):
 
    from kallithea.model.repo import RepoModel
 
    from kallithea.model.user import UserModel
 
    from kallithea.model.db import Setting
 

	
 
    log = get_logger(create_repo)
 
    DBS = get_session()
 

	
 
    cur_user = UserModel(DBS)._get_user(cur_user)
 

	
 
    owner = cur_user
 
    repo_name = form_data['repo_name']
 
    repo_name_full = form_data['repo_name_full']
 
    repo_type = form_data['repo_type']
 
    description = form_data['repo_description']
 
    private = form_data['repo_private']
 
    clone_uri = form_data.get('clone_uri')
 
    repo_group = form_data['repo_group']
 
    landing_rev = form_data['repo_landing_rev']
 
    copy_fork_permissions = form_data.get('copy_permissions')
 
    copy_group_permissions = form_data.get('repo_copy_permissions')
 
    fork_of = form_data.get('fork_parent_id')
 
    state = form_data.get('repo_state', Repository.STATE_PENDING)
 

	
 
    # repo creation defaults, private and repo_type are filled in form
 
    defs = Setting.get_default_repo_settings(strip_prefix=True)
 
    enable_statistics = defs.get('repo_enable_statistics')
 
    enable_locking = defs.get('repo_enable_locking')
 
    enable_downloads = defs.get('repo_enable_downloads')
 

	
 
    try:
 
        repo = RepoModel(DBS)._create_repo(
 
            repo_name=repo_name_full,
 
            repo_type=repo_type,
 
            description=description,
 
            owner=owner,
 
            private=private,
 
            clone_uri=clone_uri,
 
            repo_group=repo_group,
 
            landing_rev=landing_rev,
 
            fork_of=fork_of,
 
            copy_fork_permissions=copy_fork_permissions,
 
            copy_group_permissions=copy_group_permissions,
 
            enable_statistics=enable_statistics,
 
            enable_locking=enable_locking,
 
            enable_downloads=enable_downloads,
 
            state=state
 
        )
 

	
 
        action_logger(cur_user, 'user_created_repo',
 
                      form_data['repo_name_full'], '', DBS)
 

	
 
        DBS.commit()
 
        # now create this repo on Filesystem
 
        RepoModel(DBS)._create_filesystem_repo(
 
            repo_name=repo_name,
 
            repo_type=repo_type,
 
            repo_group=RepoModel(DBS)._get_repo_group(repo_group),
 
            clone_uri=clone_uri,
 
        )
 
        repo = Repository.get_by_repo_name(repo_name_full)
 
        log_create_repository(repo.get_dict(), created_by=owner.username)
 

	
 
        # update repo changeset caches initially
 
        repo.update_changeset_cache()
 

	
 
        # set new created state
 
        repo.set_state(Repository.STATE_CREATED)
 
        DBS.commit()
 
    except Exception, e:
 
        log.warning('Exception %s occurred when forking repository, '
 
                    'doing cleanup...' % e)
 
        # rollback things manually !
 
        repo = Repository.get_by_repo_name(repo_name_full)
 
        if repo:
 
            Repository.delete(repo.repo_id)
 
            DBS.commit()
 
            RepoModel(DBS)._delete_filesystem_repo(repo)
 
        raise
 

	
 
    # it's an odd fix to make celery fail task when exception occurs
 
    def on_failure(self, *args, **kwargs):
 
        pass
 

	
 
    return True
 

	
 

	
 
@task(ignore_result=False)
 
@dbsession
 
def create_repo_fork(form_data, cur_user):
 
    """
 
    Creates a fork of a repository using internal VCS methods
 

	
 
    :param form_data:
 
    :param cur_user:
 
    """
 
    from kallithea.model.repo import RepoModel
 
    from kallithea.model.user import UserModel
 

	
 
    log = get_logger(create_repo_fork)
 
    DBS = get_session()
 

	
 
    base_path = Repository.base_path()
 
    cur_user = UserModel(DBS)._get_user(cur_user)
 

	
 
    repo_name = form_data['repo_name']  # fork in this case
 
    repo_name_full = form_data['repo_name_full']
 

	
 
    repo_type = form_data['repo_type']
 
    owner = cur_user
 
    private = form_data['private']
 
    clone_uri = form_data.get('clone_uri')
 
    repo_group = form_data['repo_group']
 
    landing_rev = form_data['landing_rev']
 
    copy_fork_permissions = form_data.get('copy_permissions')
 

	
 
    try:
 
        fork_of = RepoModel(DBS)._get_repo(form_data.get('fork_parent_id'))
 

	
 
        fork_repo = RepoModel(DBS)._create_repo(
 
            repo_name=repo_name_full,
 
            repo_type=repo_type,
 
            description=form_data['description'],
 
            owner=owner,
 
            private=private,
 
            clone_uri=clone_uri,
 
            repo_group=repo_group,
 
            landing_rev=landing_rev,
 
            fork_of=fork_of,
 
            copy_fork_permissions=copy_fork_permissions
 
        )
 
        action_logger(cur_user, 'user_forked_repo:%s' % repo_name_full,
 
                      fork_of.repo_name, '', DBS)
 
        DBS.commit()
 

	
 
        update_after_clone = form_data['update_after_clone']
 
        source_repo_path = os.path.join(base_path, fork_of.repo_name)
 

	
 
        # now create this repo on Filesystem
 
        RepoModel(DBS)._create_filesystem_repo(
 
            repo_name=repo_name,
 
            repo_type=repo_type,
 
            repo_group=RepoModel(DBS)._get_repo_group(repo_group),
 
            clone_uri=source_repo_path,
 
        )
 
        repo = Repository.get_by_repo_name(repo_name_full)
 
        log_create_repository(repo.get_dict(), created_by=owner.username)
 

	
 
        # update repo changeset caches initially
 
        repo.update_changeset_cache()
 

	
 
        # set new created state
 
        repo.set_state(Repository.STATE_CREATED)
 
        DBS.commit()
 
    except Exception, e:
 
        log.warning('Exception %s occurred when forking repository, '
 
                    'doing cleanup...' % e)
 
        #rollback things manually !
 
        repo = Repository.get_by_repo_name(repo_name_full)
 
        if repo:
 
            Repository.delete(repo.repo_id)
 
            DBS.commit()
 
            RepoModel(DBS)._delete_filesystem_repo(repo)
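The companion change in kallithea/model/notification.py (below) draws the same distinction when notifications are created: an explicit recipient list notifies exactly those users, None means all admins, and an empty iterable now ends up sending no mail at all (see the new '#else: silently skip notification mails?' comment). Since every notification mail is queued with an explicit one-element list via run_task(tasks.send_email, [rec.email], ...), send_email itself only falls back to the admins when a caller passes None on purpose. A minimal sketch of that rule, with illustrative names:

    def notification_targets(recipients, all_admins):
        # Sketch of NotificationModel.create() recipient resolution after this change.
        if recipients:
            # explicit recipients: notify exactly these users
            return set(recipients)
        if recipients is None:
            # None means every admin gets the notification
            return set(all_admins)
        # an empty list now means nobody, instead of implicitly bothering the admins
        return set()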
kallithea/model/notification.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.notification
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Model for notifications
 

	
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Nov 20, 2011
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 

	
 
import os
 
import logging
 
import traceback
 

	
 
from pylons import tmpl_context as c
 
from pylons.i18n.translation import _
 

	
 
import kallithea
 
from kallithea.lib import helpers as h
 
from kallithea.model import BaseModel
 
from kallithea.model.db import Notification, User, UserNotification
 
from kallithea.model.meta import Session
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class NotificationModel(BaseModel):
 

	
 
    cls = Notification
 

	
 
    def __get_notification(self, notification):
 
        if isinstance(notification, Notification):
 
            return notification
 
        elif isinstance(notification, (int, long)):
 
            return Notification.get(notification)
 
        else:
 
            if notification:
 
                raise Exception('notification must be int, long or Instance'
 
                                ' of Notification got %s' % type(notification))
 

	
 
    def create(self, created_by, subject, body, recipients=None,
 
               type_=Notification.TYPE_MESSAGE, with_email=True,
 
               email_kwargs={}, email_subject=None):
 
        """
 

	
 
        Creates notification of given type
 

	
 
        :param created_by: int, str or User instance. User who created this
 
            notification
 
        :param subject:
 
        :param body:
 
        :param recipients: list of int, str or User objects, when None
 
            is given send to all admins
 
        :param type_: type of notification
 
        :param with_email: send email with this notification
 
        :param email_kwargs: additional dict to pass as args to email template
 
        :param email_subject: use given subject as email subject
 
        """
 
        from kallithea.lib.celerylib import tasks, run_task
 

	
 
        if recipients and not getattr(recipients, '__iter__', False):
 
            raise Exception('recipients must be a list or iterable')
 

	
 
        created_by_obj = self._get_user(created_by)
 

	
 
        recipients_objs = []
 
        if recipients:
 
            recipients_objs = []
 
            for u in recipients:
 
                obj = self._get_user(u)
 
                if obj:
 
                    recipients_objs.append(obj)
 
                else:
 
                    # TODO: inform user that requested operation couldn't be completed
 
                    log.error('cannot email unknown user %r', u)
 
            recipients_objs = set(recipients_objs)
 
            log.debug('sending notifications %s to %s' % (
 
                type_, recipients_objs)
 
            )
 
        else:
 
        elif recipients is None:
 
            # empty recipients means to all admins
 
            recipients_objs = User.query().filter(User.admin == True).all()
 
            log.debug('sending notifications %s to admins: %s' % (
 
                type_, recipients_objs)
 
            )
 
        #else: silently skip notification mails?
 

	
 
        # TODO: inform user who are notified
 
        notif = Notification.create(
 
            created_by=created_by_obj, subject=subject,
 
            body=body, recipients=recipients_objs, type_=type_
 
        )
 

	
 
        if not with_email:
 
            return notif
 

	
 
        #don't send email to person who created this comment
 
        rec_objs = set(recipients_objs).difference(set([created_by_obj]))
 

	
 
        # send email with notification to all other participants
 
        for rec in rec_objs:
 
            if not email_subject:
 
                email_subject = NotificationModel()\
 
                                    .make_description(notif, show_age=False)
 
            type_ = type_
 
            email_body = None  # we set body to none, we just send HTML emails
 
            ## this is passed into template
 
            kwargs = {'subject': subject, 'body': h.rst_w_mentions(body)}
 
            kwargs.update(email_kwargs)
 
            email_body_html = EmailNotificationModel()\
 
                                .get_email_tmpl(type_, **kwargs)
 

	
 
            run_task(tasks.send_email, [rec.email], email_subject, email_body,
 
                     email_body_html)
 

	
 
        return notif
 

	
 
    def delete(self, user, notification):
 
        # we don't want to remove actual notification just the assignment
 
        try:
 
            notification = self.__get_notification(notification)
 
            user = self._get_user(user)
 
            if notification and user:
 
                obj = UserNotification.query()\
 
                        .filter(UserNotification.user == user)\
 
                        .filter(UserNotification.notification
 
                                == notification)\
 
                        .one()
 
                Session().delete(obj)
 
                return True
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def get_for_user(self, user, filter_=None):
 
        """
 
        Get notifications for given user, filter them if filter dict is given
 

	
 
        :param user:
 
        :param filter:
 
        """
 
        user = self._get_user(user)
 

	
 
        q = UserNotification.query()\
 
            .filter(UserNotification.user == user)\
 
            .join((Notification, UserNotification.notification_id ==
 
                                 Notification.notification_id))
 

	
 
        if filter_:
 
            q = q.filter(Notification.type_.in_(filter_))
 

	
 
        return q.all()
 

	
 
    def mark_read(self, user, notification):
 
        try:
 
            notification = self.__get_notification(notification)
 
            user = self._get_user(user)
 
            if notification and user:
 
                obj = UserNotification.query()\
 
                        .filter(UserNotification.user == user)\
 
                        .filter(UserNotification.notification
 
                                == notification)\
 
                        .one()
 
                obj.read = True
 
                Session().add(obj)
 
                return True
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def mark_all_read_for_user(self, user, filter_=None):
 
        user = self._get_user(user)
 
        q = UserNotification.query()\
 
            .filter(UserNotification.user == user)\
 
            .filter(UserNotification.read == False)\
 
            .join((Notification, UserNotification.notification_id ==
 
                                 Notification.notification_id))
 
        if filter_:
 
            q = q.filter(Notification.type_.in_(filter_))
 

	
 
        # this is a little inefficient but sqlalchemy doesn't support
 
        # update on joined tables :(
 
        for obj in q.all():
 
            obj.read = True
 
            Session().add(obj)
 

	
 
    def get_unread_cnt_for_user(self, user):
 
        user = self._get_user(user)
 
        return UserNotification.query()\
 
                .filter(UserNotification.read == False)\
 
                .filter(UserNotification.user == user).count()
 

	
 
    def get_unread_for_user(self, user):
 
        user = self._get_user(user)
 
        return [x.notification for x in UserNotification.query()\
 
                .filter(UserNotification.read == False)\
 
                .filter(UserNotification.user == user).all()]
 

	
 
    def get_user_notification(self, user, notification):
 
        user = self._get_user(user)
 
        notification = self.__get_notification(notification)
 

	
 
        return UserNotification.query()\
 
            .filter(UserNotification.notification == notification)\
 
            .filter(UserNotification.user == user).scalar()
 

	
 
    def make_description(self, notification, show_age=True):
 
        """
 
        Creates a human readable description based on properties
 
        of notification object
 
        """
 
        #alias
 
        _n = notification
 
        _map = {
 
            _n.TYPE_CHANGESET_COMMENT: _('%(user)s commented on changeset at %(when)s'),
 
            _n.TYPE_MESSAGE: _('%(user)s sent message at %(when)s'),
 
            _n.TYPE_MENTION: _('%(user)s mentioned you at %(when)s'),
 
            _n.TYPE_REGISTRATION: _('%(user)s registered in Kallithea at %(when)s'),
 
            _n.TYPE_PULL_REQUEST: _('%(user)s opened new pull request at %(when)s'),
 
            _n.TYPE_PULL_REQUEST_COMMENT: _('%(user)s commented on pull request at %(when)s')
 
        }
 
        tmpl = _map[notification.type_]
 

	
 
        if show_age:
 
            when = h.age(notification.created_on)
 
        else:
 
            when = h.fmt_date(notification.created_on)
 

	
 
        return tmpl % dict(
 
            user=notification.created_by_user.username,
 
            when=when,
 
            )
 

	
 

	
 
class EmailNotificationModel(BaseModel):
 

	
 
    TYPE_CHANGESET_COMMENT = Notification.TYPE_CHANGESET_COMMENT
 
    TYPE_PASSWORD_RESET = 'password_link'
 
    TYPE_REGISTRATION = Notification.TYPE_REGISTRATION
 
    TYPE_PULL_REQUEST = Notification.TYPE_PULL_REQUEST
 
    TYPE_PULL_REQUEST_COMMENT = Notification.TYPE_PULL_REQUEST_COMMENT
 
    TYPE_DEFAULT = 'default'
 

	
 
    def __init__(self):
 
        super(EmailNotificationModel, self).__init__()
 
        self._template_root = kallithea.CONFIG['pylons.paths']['templates'][0]
 
        self._tmpl_lookup = kallithea.CONFIG['pylons.app_globals'].mako_lookup
 
        self.email_types = {
 
            self.TYPE_CHANGESET_COMMENT: 'email_templates/changeset_comment.html',
 
            self.TYPE_PASSWORD_RESET: 'email_templates/password_reset.html',
 
            self.TYPE_REGISTRATION: 'email_templates/registration.html',
 
            self.TYPE_DEFAULT: 'email_templates/default.html',
 
            self.TYPE_PULL_REQUEST: 'email_templates/pull_request.html',
 
            self.TYPE_PULL_REQUEST_COMMENT: 'email_templates/pull_request_comment.html',
 
        }
 

	
 

	
 
    def get_email_tmpl(self, type_, **kwargs):
 
        """
 
        return generated template for email based on given type
 

	
 
        :param type_:
 
        """
 

	
 
        base = self.email_types.get(type_, self.email_types[self.TYPE_DEFAULT])
 
        email_template = self._tmpl_lookup.get_template(base)
 
        # translator and helpers inject
 
        _kwargs = {'_': _,
 
                   'h': h,
 
                   'c': c}
 
        _kwargs.update(kwargs)
 
        log.debug('rendering tmpl %s with kwargs %s' % (base, _kwargs))
 
        return email_template.render(**_kwargs)
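For reference, the HTML bodies handed to send_email come from EmailNotificationModel.get_email_tmpl, which renders a Mako template chosen by notification type and falls back to TYPE_DEFAULT for unknown types. A hedged usage sketch, assuming a fully configured Kallithea/Pylons environment (the keyword arguments are simply whatever the chosen template expects):

    from kallithea.model.notification import EmailNotificationModel

    # Requires kallithea.CONFIG to be populated, i.e. a configured application context.
    model = EmailNotificationModel()
    html_body = model.get_email_tmpl(
        EmailNotificationModel.TYPE_DEFAULT,   # unknown types also resolve to this template
        subject='Test notification',
        body='Body text rendered through email_templates/default.html',
    )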