Changeset 5725fa4cfecd
Branch: default
Mads Kiilerich <mads@kiilerich.com>
2019-08-04 00:07:10
Grafted from: 7cc8ba8bc555
cleanup: minimize use of lambda expressions - we have 'def' for that purpose

Fix some flake8 warnings "E731 do not assign a lambda expression, use a def".
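
The pattern applied across these files is the standard fix for flake8 E731: a lambda that is assigned to a name is rewritten as a def with the same name and body. It behaves identically, but the function gets a real __name__ for tracebacks and profiling and may use a multi-line body. A minimal, generic sketch of the before/after (illustrative names only, not code from this changeset):

    # flagged by E731: a lambda bound to a name
    render_row = lambda row: ', '.join(str(cell) for cell in row)

    # equivalent def: same behaviour, but with a proper function name
    def render_row(row):
        return ', '.join(str(cell) for cell in row)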
12 files changed:
kallithea/controllers/admin/repo_groups.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.admin.repo_groups
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Repository groups controller for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Mar 23, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 
import traceback
 

	
 
import formencode
 
from formencode import htmlfill
 
from tg import app_globals, request
 
from tg import tmpl_context as c
 
from tg.i18n import ugettext as _
 
from tg.i18n import ungettext
 
from webob.exc import HTTPForbidden, HTTPFound, HTTPInternalServerError, HTTPNotFound
 

	
 
from kallithea.config.routing import url
 
from kallithea.lib import helpers as h
 
from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoGroupPermissionLevelDecorator, LoginRequired
 
from kallithea.lib.base import BaseController, render
 
from kallithea.lib.utils2 import safe_int
 
from kallithea.model.db import RepoGroup, Repository
 
from kallithea.model.forms import RepoGroupForm, RepoGroupPermsForm
 
from kallithea.model.meta import Session
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.repo_group import RepoGroupModel
 
from kallithea.model.scm import AvailableRepoGroupChoices, RepoGroupList
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class RepoGroupsController(BaseController):
 

	
 
    @LoginRequired(allow_default_user=True)
 
    def _before(self, *args, **kwargs):
 
        super(RepoGroupsController, self)._before(*args, **kwargs)
 

	
 
    def __load_defaults(self, extras=(), exclude=()):
 
        """extras is used for keeping current parent ignoring permissions
 
        exclude is used for not moving group to itself TODO: also exclude descendants
 
        Note: only admin can create top level groups
 
        """
 
        repo_groups = AvailableRepoGroupChoices([], 'admin', extras)
 
        exclude_group_ids = set(rg.group_id for rg in exclude)
 
        c.repo_groups = [rg for rg in repo_groups
 
                         if rg[0] not in exclude_group_ids]
 

	
 
    def __load_data(self, group_id):
 
        """
 
        Load defaults settings for edit, and update
 

	
 
        :param group_id:
 
        """
 
        repo_group = RepoGroup.get_or_404(group_id)
 
        data = repo_group.get_dict()
 
        data['group_name'] = repo_group.name
 

	
 
        # fill repository group users
 
        for p in repo_group.repo_group_to_perm:
 
            data.update({'u_perm_%s' % p.user.username:
 
                             p.permission.permission_name})
 

	
 
        # fill repository group groups
 
        for p in repo_group.users_group_to_perm:
 
            data.update({'g_perm_%s' % p.users_group.users_group_name:
 
                             p.permission.permission_name})
 

	
 
        return data
 

	
 
    def _revoke_perms_on_yourself(self, form_result):
 
        _up = [u for u in form_result['perms_updates'] if request.authuser.username == u[0]]
 
        _new = [u for u in form_result['perms_new'] if request.authuser.username == u[0]]
 
        if _new and _new[0][1] != 'group.admin' or _up and _up[0][1] != 'group.admin':
 
            return True
 
        return False
 

	
 
    def index(self, format='html'):
 
        _list = RepoGroup.query(sorted=True).all()
 
        group_iter = RepoGroupList(_list, perm_level='admin')
 
        repo_groups_data = []
 
        _tmpl_lookup = app_globals.mako_lookup
 
        template = _tmpl_lookup.get_template('data_table/_dt_elements.html')
 

	
 
-        repo_group_name = lambda repo_group_name, children_groups: (
-            template.get_def("repo_group_name")
+        def repo_group_name(repo_group_name, children_groups):
+            return template.get_def("repo_group_name") \
             .render_unicode(repo_group_name, children_groups, _=_, h=h, c=c)
-        )
 
-        repo_group_actions = lambda repo_group_id, repo_group_name, gr_count: (
-            template.get_def("repo_group_actions")
+
+        def repo_group_actions(repo_group_id, repo_group_name, gr_count):
+            return template.get_def("repo_group_actions") \
             .render_unicode(repo_group_id, repo_group_name, gr_count, _=_, h=h, c=c,
                     ungettext=ungettext)
-        )
 

	
 
        for repo_gr in group_iter:
 
            children_groups = [g.name for g in repo_gr.parents] + [repo_gr.name]
 
            repo_count = repo_gr.repositories.count()
 
            repo_groups_data.append({
 
                "raw_name": repo_gr.group_name,
 
                "group_name": repo_group_name(repo_gr.group_name, children_groups),
 
                "desc": h.escape(repo_gr.group_description),
 
                "repos": repo_count,
 
                "owner": h.person(repo_gr.owner),
 
                "action": repo_group_actions(repo_gr.group_id, repo_gr.group_name,
 
                                             repo_count)
 
            })
 

	
 
        c.data = {
 
            "sort": None,
 
            "dir": "asc",
 
            "records": repo_groups_data
 
        }
 

	
 
        return render('admin/repo_groups/repo_groups.html')
 

	
 
    def create(self):
 
        self.__load_defaults()
 

	
 
        # permissions for can create group based on parent_id are checked
 
        # here in the Form
 
        repo_group_form = RepoGroupForm(repo_groups=c.repo_groups)
 
        form_result = None
 
        try:
 
            form_result = repo_group_form.to_python(dict(request.POST))
 
            gr = RepoGroupModel().create(
 
                group_name=form_result['group_name'],
 
                group_description=form_result['group_description'],
 
                parent=form_result['parent_group_id'],
 
                owner=request.authuser.user_id, # TODO: make editable
 
                copy_permissions=form_result['group_copy_permissions']
 
            )
 
            Session().commit()
 
            # TODO: in future action_logger(, '', '', '')
 
        except formencode.Invalid as errors:
 
            return htmlfill.render(
 
                render('admin/repo_groups/repo_group_add.html'),
 
                defaults=errors.value,
 
                errors=errors.error_dict or {},
 
                prefix_error=False,
 
                encoding="UTF-8",
 
                force_defaults=False)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('Error occurred during creation of repository group %s')
 
                    % request.POST.get('group_name'), category='error')
 
            if form_result is None:
 
                raise
 
            parent_group_id = form_result['parent_group_id']
 
            # TODO: maybe we should get back to the main view, not the admin one
 
            raise HTTPFound(location=url('repos_groups', parent_group=parent_group_id))
 
        h.flash(_('Created repository group %s') % gr.group_name,
 
                category='success')
 
        raise HTTPFound(location=url('repos_group_home', group_name=gr.group_name))
 

	
 
    def new(self):
 
        if HasPermissionAny('hg.admin')('group create'):
 
            # we're global admin, we're ok and we can create TOP level groups
 
            pass
 
        else:
 
            # we pass in parent group into creation form, thus we know
 
            # what would be the group, we can check perms here !
 
            group_id = safe_int(request.GET.get('parent_group'))
 
            group = RepoGroup.get(group_id) if group_id else None
 
            group_name = group.group_name if group else None
 
            if HasRepoGroupPermissionLevel('admin')(group_name, 'group create'):
 
                pass
 
            else:
 
                raise HTTPForbidden()
 

	
 
        self.__load_defaults()
 
        return render('admin/repo_groups/repo_group_add.html')
 

	
 
    @HasRepoGroupPermissionLevelDecorator('admin')
 
    def update(self, group_name):
 
        c.repo_group = RepoGroup.guess_instance(group_name)
 
        self.__load_defaults(extras=[c.repo_group.parent_group],
 
                             exclude=[c.repo_group])
 

	
 
        # TODO: kill allow_empty_group - it is only used for redundant form validation!
 
        if HasPermissionAny('hg.admin')('group edit'):
 
            # we're global admin, we're ok and we can create TOP level groups
 
            allow_empty_group = True
 
        elif not c.repo_group.parent_group:
 
            allow_empty_group = True
 
        else:
 
            allow_empty_group = False
 
        repo_group_form = RepoGroupForm(
 
            edit=True,
 
            old_data=c.repo_group.get_dict(),
 
            repo_groups=c.repo_groups,
 
            can_create_in_root=allow_empty_group,
 
        )()
 
        try:
 
            form_result = repo_group_form.to_python(dict(request.POST))
 

	
 
            new_gr = RepoGroupModel().update(group_name, form_result)
 
            Session().commit()
 
            h.flash(_('Updated repository group %s')
 
                    % form_result['group_name'], category='success')
 
            # we now have new name !
 
            group_name = new_gr.group_name
 
            # TODO: in future action_logger(, '', '', '')
 
        except formencode.Invalid as errors:
 
            c.active = 'settings'
 
            return htmlfill.render(
 
                render('admin/repo_groups/repo_group_edit.html'),
 
                defaults=errors.value,
 
                errors=errors.error_dict or {},
 
                prefix_error=False,
 
                encoding="UTF-8",
 
                force_defaults=False)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('Error occurred during update of repository group %s')
 
                    % request.POST.get('group_name'), category='error')
 

	
 
        raise HTTPFound(location=url('edit_repo_group', group_name=group_name))
 

	
 
    @HasRepoGroupPermissionLevelDecorator('admin')
 
    def delete(self, group_name):
 
        gr = c.repo_group = RepoGroup.guess_instance(group_name)
 
        repos = gr.repositories.all()
 
        if repos:
 
            h.flash(_('This group contains %s repositories and cannot be '
 
                      'deleted') % len(repos), category='warning')
 
            raise HTTPFound(location=url('repos_groups'))
 

	
 
        children = gr.children.all()
 
        if children:
 
            h.flash(_('This group contains %s subgroups and cannot be deleted'
 
                      % (len(children))), category='warning')
 
            raise HTTPFound(location=url('repos_groups'))
 

	
 
        try:
 
            RepoGroupModel().delete(group_name)
 
            Session().commit()
 
            h.flash(_('Removed repository group %s') % group_name,
 
                    category='success')
 
            # TODO: in future action_logger(, '', '', '')
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('Error occurred during deletion of repository group %s')
 
                    % group_name, category='error')
 

	
 
        if gr.parent_group:
 
            raise HTTPFound(location=url('repos_group_home', group_name=gr.parent_group.group_name))
 
        raise HTTPFound(location=url('repos_groups'))
 

	
 
    def show_by_name(self, group_name):
 
        """
 
        This is a proxy that does a lookup group_name -> id, and shows
 
        the group by id view instead
 
        """
 
        group_name = group_name.rstrip('/')
 
        id_ = RepoGroup.get_by_group_name(group_name)
 
        if id_:
 
            return self.show(group_name)
 
        raise HTTPNotFound
 

	
 
    @HasRepoGroupPermissionLevelDecorator('read')
 
    def show(self, group_name):
 
        c.active = 'settings'
 

	
 
        c.group = c.repo_group = RepoGroup.guess_instance(group_name)
 

	
 
        groups = RepoGroup.query(sorted=True).filter_by(parent_group=c.group).all()
 
        repo_groups_list = self.scm_model.get_repo_groups(groups)
 

	
 
        repos_list = Repository.query(sorted=True).filter_by(group=c.group).all()
 
        c.data = RepoModel().get_repos_as_dict(repos_list,
 
                                               repo_groups_list=repo_groups_list,
 
                                               short_name=True)
 

	
 
        return render('admin/repo_groups/repo_group_show.html')
 

	
 
    @HasRepoGroupPermissionLevelDecorator('admin')
 
    def edit(self, group_name):
 
        c.active = 'settings'
 

	
 
        c.repo_group = RepoGroup.guess_instance(group_name)
 
        self.__load_defaults(extras=[c.repo_group.parent_group],
 
                             exclude=[c.repo_group])
 
        defaults = self.__load_data(c.repo_group.group_id)
 

	
 
        return htmlfill.render(
 
            render('admin/repo_groups/repo_group_edit.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False
 
        )
 

	
 
    @HasRepoGroupPermissionLevelDecorator('admin')
 
    def edit_repo_group_advanced(self, group_name):
 
        c.active = 'advanced'
 
        c.repo_group = RepoGroup.guess_instance(group_name)
 

	
 
        return render('admin/repo_groups/repo_group_edit.html')
 

	
 
    @HasRepoGroupPermissionLevelDecorator('admin')
 
    def edit_repo_group_perms(self, group_name):
 
        c.active = 'perms'
 
        c.repo_group = RepoGroup.guess_instance(group_name)
 
        self.__load_defaults()
 
        defaults = self.__load_data(c.repo_group.group_id)
 

	
 
        return htmlfill.render(
 
            render('admin/repo_groups/repo_group_edit.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False
 
        )
 

	
 
    @HasRepoGroupPermissionLevelDecorator('admin')
 
    def update_perms(self, group_name):
 
        """
 
        Update permissions for given repository group
 

	
 
        :param group_name:
 
        """
 

	
 
        c.repo_group = RepoGroup.guess_instance(group_name)
 
        valid_recursive_choices = ['none', 'repos', 'groups', 'all']
 
        form_result = RepoGroupPermsForm(valid_recursive_choices)().to_python(request.POST)
 
        if not request.authuser.is_admin:
 
            if self._revoke_perms_on_yourself(form_result):
 
                msg = _('Cannot revoke permission for yourself as admin')
 
                h.flash(msg, category='warning')
 
                raise HTTPFound(location=url('edit_repo_group_perms', group_name=group_name))
 
        recursive = form_result['recursive']
 
        # iterate over all members(if in recursive mode) of this groups and
 
        # set the permissions !
 
        # this can be potentially heavy operation
 
        RepoGroupModel()._update_permissions(c.repo_group,
 
                                             form_result['perms_new'],
 
                                             form_result['perms_updates'],
 
                                             recursive)
 
        # TODO: implement this
 
        #action_logger(request.authuser, 'admin_changed_repo_permissions',
 
        #              repo_name, request.ip_addr)
 
        Session().commit()
 
        h.flash(_('Repository group permissions updated'), category='success')
 
        raise HTTPFound(location=url('edit_repo_group_perms', group_name=group_name))
 

	
 
    @HasRepoGroupPermissionLevelDecorator('admin')
 
    def delete_perms(self, group_name):
 
        try:
 
            obj_type = request.POST.get('obj_type')
 
            obj_id = None
 
            if obj_type == 'user':
 
                obj_id = safe_int(request.POST.get('user_id'))
 
            elif obj_type == 'user_group':
 
                obj_id = safe_int(request.POST.get('user_group_id'))
 

	
 
            if not request.authuser.is_admin:
 
                if obj_type == 'user' and request.authuser.user_id == obj_id:
 
                    msg = _('Cannot revoke permission for yourself as admin')
 
                    h.flash(msg, category='warning')
 
                    raise Exception('revoke admin permission on self')
 
            recursive = request.POST.get('recursive', 'none')
 
            if obj_type == 'user':
 
                RepoGroupModel().delete_permission(repo_group=group_name,
 
                                                   obj=obj_id, obj_type='user',
 
                                                   recursive=recursive)
 
            elif obj_type == 'user_group':
 
                RepoGroupModel().delete_permission(repo_group=group_name,
 
                                                   obj=obj_id,
 
                                                   obj_type='user_group',
 
                                                   recursive=recursive)
 

	
 
            Session().commit()
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('An error occurred during revoking of permission'),
 
                    category='error')
 
            raise HTTPInternalServerError()
kallithea/controllers/admin/user_groups.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.admin.user_groups
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
User Groups crud controller
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jan 25, 2011
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 
import traceback
 

	
 
import formencode
 
from formencode import htmlfill
 
from sqlalchemy.orm import joinedload
 
from sqlalchemy.sql.expression import func
 
from tg import app_globals, request
 
from tg import tmpl_context as c
 
from tg.i18n import ugettext as _
 
from webob.exc import HTTPFound, HTTPInternalServerError
 

	
 
from kallithea.config.routing import url
 
from kallithea.lib import helpers as h
 
from kallithea.lib.auth import HasPermissionAnyDecorator, HasUserGroupPermissionLevelDecorator, LoginRequired
 
from kallithea.lib.base import BaseController, render
 
from kallithea.lib.exceptions import RepoGroupAssignmentError, UserGroupsAssignedException
 
from kallithea.lib.utils import action_logger
 
from kallithea.lib.utils2 import safe_int, safe_str
 
from kallithea.model.db import User, UserGroup, UserGroupRepoGroupToPerm, UserGroupRepoToPerm, UserGroupToPerm
 
from kallithea.model.forms import CustomDefaultPermissionsForm, UserGroupForm, UserGroupPermsForm
 
from kallithea.model.meta import Session
 
from kallithea.model.scm import UserGroupList
 
from kallithea.model.user_group import UserGroupModel
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class UserGroupsController(BaseController):
 
    """REST Controller styled on the Atom Publishing Protocol"""
 

	
 
    @LoginRequired(allow_default_user=True)
 
    def _before(self, *args, **kwargs):
 
        super(UserGroupsController, self)._before(*args, **kwargs)
 

	
 
    def __load_data(self, user_group_id):
 
        c.group_members_obj = sorted((x.user for x in c.user_group.members),
 
                                     key=lambda u: u.username.lower())
 

	
 
        c.group_members = [(x.user_id, x.username) for x in c.group_members_obj]
 
        c.available_members = sorted(((x.user_id, x.username) for x in
 
                                      User.query().all()),
 
                                     key=lambda u: u[1].lower())
 

	
 
    def __load_defaults(self, user_group_id):
 
        """
 
        Load defaults settings for edit, and update
 

	
 
        :param user_group_id:
 
        """
 
        user_group = UserGroup.get_or_404(user_group_id)
 
        data = user_group.get_dict()
 
        return data
 

	
 
    def index(self, format='html'):
 
        _list = UserGroup.query() \
 
                        .order_by(func.lower(UserGroup.users_group_name)) \
 
                        .all()
 
        group_iter = UserGroupList(_list, perm_level='admin')
 
        user_groups_data = []
 
        _tmpl_lookup = app_globals.mako_lookup
 
        template = _tmpl_lookup.get_template('data_table/_dt_elements.html')
 

	
 
-        user_group_name = lambda user_group_id, user_group_name: (
-            template.get_def("user_group_name")
+        def user_group_name(user_group_id, user_group_name):
+            return template.get_def("user_group_name") \
             .render_unicode(user_group_id, user_group_name, _=_, h=h, c=c)
-        )
 
-        user_group_actions = lambda user_group_id, user_group_name: (
-            template.get_def("user_group_actions")
+
+        def user_group_actions(user_group_id, user_group_name):
+            return template.get_def("user_group_actions") \
             .render_unicode(user_group_id, user_group_name, _=_, h=h, c=c)
-        )
 

	
 
        for user_gr in group_iter:
 

	
 
            user_groups_data.append({
 
                "raw_name": user_gr.users_group_name,
 
                "group_name": user_group_name(user_gr.users_group_id,
 
                                              user_gr.users_group_name),
 
                "desc": h.escape(user_gr.user_group_description),
 
                "members": len(user_gr.members),
 
                "active": h.boolicon(user_gr.users_group_active),
 
                "owner": h.person(user_gr.owner.username),
 
                "action": user_group_actions(user_gr.users_group_id, user_gr.users_group_name)
 
            })
 

	
 
        c.data = {
 
            "sort": None,
 
            "dir": "asc",
 
            "records": user_groups_data
 
        }
 

	
 
        return render('admin/user_groups/user_groups.html')
 

	
 
    @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
 
    def create(self):
 
        users_group_form = UserGroupForm()()
 
        try:
 
            form_result = users_group_form.to_python(dict(request.POST))
 
            ug = UserGroupModel().create(name=form_result['users_group_name'],
 
                                         description=form_result['user_group_description'],
 
                                         owner=request.authuser.user_id,
 
                                         active=form_result['users_group_active'])
 

	
 
            gr = form_result['users_group_name']
 
            action_logger(request.authuser,
 
                          'admin_created_users_group:%s' % gr,
 
                          None, request.ip_addr)
 
            h.flash(h.HTML(_('Created user group %s')) % h.link_to(gr, url('edit_users_group', id=ug.users_group_id)),
 
                category='success')
 
            Session().commit()
 
        except formencode.Invalid as errors:
 
            return htmlfill.render(
 
                render('admin/user_groups/user_group_add.html'),
 
                defaults=errors.value,
 
                errors=errors.error_dict or {},
 
                prefix_error=False,
 
                encoding="UTF-8",
 
                force_defaults=False)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('Error occurred during creation of user group %s')
 
                    % request.POST.get('users_group_name'), category='error')
 

	
 
        raise HTTPFound(location=url('users_groups'))
 

	
 
    @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
 
    def new(self, format='html'):
 
        return render('admin/user_groups/user_group_add.html')
 

	
 
    @HasUserGroupPermissionLevelDecorator('admin')
 
    def update(self, id):
 
        c.user_group = UserGroup.get_or_404(id)
 
        c.active = 'settings'
 
        self.__load_data(id)
 

	
 
        available_members = [safe_str(x[0]) for x in c.available_members]
 

	
 
        users_group_form = UserGroupForm(edit=True,
 
                                         old_data=c.user_group.get_dict(),
 
                                         available_members=available_members)()
 

	
 
        try:
 
            form_result = users_group_form.to_python(request.POST)
 
            UserGroupModel().update(c.user_group, form_result)
 
            gr = form_result['users_group_name']
 
            action_logger(request.authuser,
 
                          'admin_updated_users_group:%s' % gr,
 
                          None, request.ip_addr)
 
            h.flash(_('Updated user group %s') % gr, category='success')
 
            Session().commit()
 
        except formencode.Invalid as errors:
 
            ug_model = UserGroupModel()
 
            defaults = errors.value
 
            e = errors.error_dict or {}
 
            defaults.update({
 
                'create_repo_perm': ug_model.has_perm(id,
 
                                                      'hg.create.repository'),
 
                'fork_repo_perm': ug_model.has_perm(id,
 
                                                    'hg.fork.repository'),
 
            })
 

	
 
            return htmlfill.render(
 
                render('admin/user_groups/user_group_edit.html'),
 
                defaults=defaults,
 
                errors=e,
 
                prefix_error=False,
 
                encoding="UTF-8",
 
                force_defaults=False)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('Error occurred during update of user group %s')
 
                    % request.POST.get('users_group_name'), category='error')
 

	
 
        raise HTTPFound(location=url('edit_users_group', id=id))
 

	
 
    @HasUserGroupPermissionLevelDecorator('admin')
 
    def delete(self, id):
 
        usr_gr = UserGroup.get_or_404(id)
 
        try:
 
            UserGroupModel().delete(usr_gr)
 
            Session().commit()
 
            h.flash(_('Successfully deleted user group'), category='success')
 
        except UserGroupsAssignedException as e:
 
            h.flash(e, category='error')
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('An error occurred during deletion of user group'),
 
                    category='error')
 
        raise HTTPFound(location=url('users_groups'))
 

	
 
    @HasUserGroupPermissionLevelDecorator('admin')
 
    def edit(self, id, format='html'):
 
        c.user_group = UserGroup.get_or_404(id)
 
        c.active = 'settings'
 
        self.__load_data(id)
 

	
 
        defaults = self.__load_defaults(id)
 

	
 
        return htmlfill.render(
 
            render('admin/user_groups/user_group_edit.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False
 
        )
 

	
 
    @HasUserGroupPermissionLevelDecorator('admin')
 
    def edit_perms(self, id):
 
        c.user_group = UserGroup.get_or_404(id)
 
        c.active = 'perms'
 

	
 
        defaults = {}
 
        # fill user group users
 
        for p in c.user_group.user_user_group_to_perm:
 
            defaults.update({'u_perm_%s' % p.user.username:
 
                             p.permission.permission_name})
 

	
 
        for p in c.user_group.user_group_user_group_to_perm:
 
            defaults.update({'g_perm_%s' % p.user_group.users_group_name:
 
                             p.permission.permission_name})
 

	
 
        return htmlfill.render(
 
            render('admin/user_groups/user_group_edit.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False
 
        )
 

	
 
    @HasUserGroupPermissionLevelDecorator('admin')
 
    def update_perms(self, id):
 
        """
 
        grant permission for given usergroup
 

	
 
        :param id:
 
        """
 
        user_group = UserGroup.get_or_404(id)
 
        form = UserGroupPermsForm()().to_python(request.POST)
 

	
 
        # set the permissions !
 
        try:
 
            UserGroupModel()._update_permissions(user_group, form['perms_new'],
 
                                                 form['perms_updates'])
 
        except RepoGroupAssignmentError:
 
            h.flash(_('Target group cannot be the same'), category='error')
 
            raise HTTPFound(location=url('edit_user_group_perms', id=id))
 
        # TODO: implement this
 
        #action_logger(request.authuser, 'admin_changed_repo_permissions',
 
        #              repo_name, request.ip_addr)
 
        Session().commit()
 
        h.flash(_('User group permissions updated'), category='success')
 
        raise HTTPFound(location=url('edit_user_group_perms', id=id))
 

	
 
    @HasUserGroupPermissionLevelDecorator('admin')
 
    def delete_perms(self, id):
 
        try:
 
            obj_type = request.POST.get('obj_type')
 
            obj_id = None
 
            if obj_type == 'user':
 
                obj_id = safe_int(request.POST.get('user_id'))
 
            elif obj_type == 'user_group':
 
                obj_id = safe_int(request.POST.get('user_group_id'))
 

	
 
            if not request.authuser.is_admin:
 
                if obj_type == 'user' and request.authuser.user_id == obj_id:
 
                    msg = _('Cannot revoke permission for yourself as admin')
 
                    h.flash(msg, category='warning')
 
                    raise Exception('revoke admin permission on self')
 
            if obj_type == 'user':
 
                UserGroupModel().revoke_user_permission(user_group=id,
 
                                                        user=obj_id)
 
            elif obj_type == 'user_group':
 
                UserGroupModel().revoke_user_group_permission(target_user_group=id,
 
                                                              user_group=obj_id)
 
            Session().commit()
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('An error occurred during revoking of permission'),
 
                    category='error')
 
            raise HTTPInternalServerError()
 

	
 
    @HasUserGroupPermissionLevelDecorator('admin')
 
    def edit_default_perms(self, id):
 
        c.user_group = UserGroup.get_or_404(id)
 
        c.active = 'default_perms'
 

	
 
        permissions = {
 
            'repositories': {},
 
            'repositories_groups': {}
 
        }
 
        ugroup_repo_perms = UserGroupRepoToPerm.query() \
 
            .options(joinedload(UserGroupRepoToPerm.permission)) \
 
            .options(joinedload(UserGroupRepoToPerm.repository)) \
 
            .filter(UserGroupRepoToPerm.users_group_id == id) \
 
            .all()
 

	
 
        for gr in ugroup_repo_perms:
 
            permissions['repositories'][gr.repository.repo_name]  \
 
                = gr.permission.permission_name
 

	
 
        ugroup_group_perms = UserGroupRepoGroupToPerm.query() \
 
            .options(joinedload(UserGroupRepoGroupToPerm.permission)) \
 
            .options(joinedload(UserGroupRepoGroupToPerm.group)) \
 
            .filter(UserGroupRepoGroupToPerm.users_group_id == id) \
 
            .all()
 

	
 
        for gr in ugroup_group_perms:
 
            permissions['repositories_groups'][gr.group.group_name] \
 
                = gr.permission.permission_name
 
        c.permissions = permissions
 

	
 
        ug_model = UserGroupModel()
 

	
 
        defaults = c.user_group.get_dict()
 
        defaults.update({
 
            'create_repo_perm': ug_model.has_perm(c.user_group,
 
                                                  'hg.create.repository'),
 
            'create_user_group_perm': ug_model.has_perm(c.user_group,
 
                                                        'hg.usergroup.create.true'),
 
            'fork_repo_perm': ug_model.has_perm(c.user_group,
 
                                                'hg.fork.repository'),
 
        })
 

	
 
        return htmlfill.render(
 
            render('admin/user_groups/user_group_edit.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False
 
        )
 

	
 
    @HasUserGroupPermissionLevelDecorator('admin')
 
    def update_default_perms(self, id):
 
        user_group = UserGroup.get_or_404(id)
 

	
 
        try:
 
            form = CustomDefaultPermissionsForm()()
 
            form_result = form.to_python(request.POST)
 

	
 
            usergroup_model = UserGroupModel()
 

	
 
            defs = UserGroupToPerm.query() \
 
                .filter(UserGroupToPerm.users_group == user_group) \
 
                .all()
 
            for ug in defs:
 
                Session().delete(ug)
 

	
 
            if form_result['create_repo_perm']:
 
                usergroup_model.grant_perm(id, 'hg.create.repository')
 
            else:
 
                usergroup_model.grant_perm(id, 'hg.create.none')
 
            if form_result['create_user_group_perm']:
 
                usergroup_model.grant_perm(id, 'hg.usergroup.create.true')
 
            else:
 
                usergroup_model.grant_perm(id, 'hg.usergroup.create.false')
 
            if form_result['fork_repo_perm']:
 
                usergroup_model.grant_perm(id, 'hg.fork.repository')
 
            else:
 
                usergroup_model.grant_perm(id, 'hg.fork.none')
 

	
 
            h.flash(_("Updated permissions"), category='success')
 
            Session().commit()
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('An error occurred during permissions saving'),
 
                    category='error')
 

	
 
        raise HTTPFound(location=url('edit_user_group_default_perms', id=id))
 

	
 
    @HasUserGroupPermissionLevelDecorator('admin')
 
    def edit_advanced(self, id):
 
        c.user_group = UserGroup.get_or_404(id)
 
        c.active = 'advanced'
 
        c.group_members_obj = sorted((x.user for x in c.user_group.members),
 
                                     key=lambda u: u.username.lower())
 
        return render('admin/user_groups/user_group_edit.html')
 

	
 
    @HasUserGroupPermissionLevelDecorator('admin')
 
    def edit_members(self, id):
 
        c.user_group = UserGroup.get_or_404(id)
 
        c.active = 'members'
 
        c.group_members_obj = sorted((x.user for x in c.user_group.members),
 
                                     key=lambda u: u.username.lower())
 

	
 
        c.group_members = [(x.user_id, x.username) for x in c.group_members_obj]
 
        return render('admin/user_groups/user_group_edit.html')
kallithea/controllers/admin/users.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.admin.users
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Users crud controller
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 4, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 
import traceback
 

	
 
import formencode
 
from formencode import htmlfill
 
from sqlalchemy.sql.expression import func
 
from tg import app_globals, request
 
from tg import tmpl_context as c
 
from tg.i18n import ugettext as _
 
from webob.exc import HTTPFound, HTTPNotFound
 

	
 
from kallithea.config.routing import url
 
from kallithea.lib import auth_modules
 
from kallithea.lib import helpers as h
 
from kallithea.lib.auth import AuthUser, HasPermissionAnyDecorator, LoginRequired
 
from kallithea.lib.base import BaseController, IfSshEnabled, render
 
from kallithea.lib.exceptions import DefaultUserException, UserCreationError, UserOwnsReposException
 
from kallithea.lib.utils import action_logger
 
from kallithea.lib.utils2 import datetime_to_time, generate_api_key, safe_int
 
from kallithea.model.api_key import ApiKeyModel
 
from kallithea.model.db import User, UserEmailMap, UserIpMap, UserToPerm
 
from kallithea.model.forms import CustomDefaultPermissionsForm, UserForm
 
from kallithea.model.meta import Session
 
from kallithea.model.ssh_key import SshKeyModel, SshKeyModelException
 
from kallithea.model.user import UserModel
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class UsersController(BaseController):
 
    """REST Controller styled on the Atom Publishing Protocol"""
 

	
 
    @LoginRequired()
 
    @HasPermissionAnyDecorator('hg.admin')
 
    def _before(self, *args, **kwargs):
 
        super(UsersController, self)._before(*args, **kwargs)
 

	
 
    def index(self, format='html'):
 
        c.users_list = User.query().order_by(User.username) \
 
                        .filter_by(is_default_user=False) \
 
                        .order_by(func.lower(User.username)) \
 
                        .all()
 

	
 
        users_data = []
 
        _tmpl_lookup = app_globals.mako_lookup
 
        template = _tmpl_lookup.get_template('data_table/_dt_elements.html')
 

	
 
        grav_tmpl = '<div class="gravatar">%s</div>'
 

	
 
-        username = lambda user_id, username: (
-                template.get_def("user_name")
-                .render_unicode(user_id, username, _=_, h=h, c=c))
+        def username(user_id, username):
+            return template.get_def("user_name") \
+                .render_unicode(user_id, username, _=_, h=h, c=c)
 

	
 
-        user_actions = lambda user_id, username: (
-                template.get_def("user_actions")
-                .render_unicode(user_id, username, _=_, h=h, c=c))
+        def user_actions(user_id, username):
+            return template.get_def("user_actions") \
+                .render_unicode(user_id, username, _=_, h=h, c=c)
 

	
 
        for user in c.users_list:
 
            users_data.append({
 
                "gravatar": grav_tmpl % h.gravatar(user.email, size=20),
 
                "raw_name": user.username,
 
                "username": username(user.user_id, user.username),
 
                "firstname": h.escape(user.name),
 
                "lastname": h.escape(user.lastname),
 
                "last_login": h.fmt_date(user.last_login),
 
                "last_login_raw": datetime_to_time(user.last_login),
 
                "active": h.boolicon(user.active),
 
                "admin": h.boolicon(user.admin),
 
                "extern_type": user.extern_type,
 
                "extern_name": user.extern_name,
 
                "action": user_actions(user.user_id, user.username),
 
            })
 

	
 
        c.data = {
 
            "sort": None,
 
            "dir": "asc",
 
            "records": users_data
 
        }
 

	
 
        return render('admin/users/users.html')
 

	
 
    def create(self):
 
        c.default_extern_type = User.DEFAULT_AUTH_TYPE
 
        c.default_extern_name = ''
 
        user_model = UserModel()
 
        user_form = UserForm()()
 
        try:
 
            form_result = user_form.to_python(dict(request.POST))
 
            user = user_model.create(form_result)
 
            action_logger(request.authuser, 'admin_created_user:%s' % user.username,
 
                          None, request.ip_addr)
 
            h.flash(_('Created user %s') % user.username,
 
                    category='success')
 
            Session().commit()
 
        except formencode.Invalid as errors:
 
            return htmlfill.render(
 
                render('admin/users/user_add.html'),
 
                defaults=errors.value,
 
                errors=errors.error_dict or {},
 
                prefix_error=False,
 
                encoding="UTF-8",
 
                force_defaults=False)
 
        except UserCreationError as e:
 
            h.flash(e, 'error')
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('Error occurred during creation of user %s')
 
                    % request.POST.get('username'), category='error')
 
        raise HTTPFound(location=url('edit_user', id=user.user_id))
 

	
 
    def new(self, format='html'):
 
        c.default_extern_type = User.DEFAULT_AUTH_TYPE
 
        c.default_extern_name = ''
 
        return render('admin/users/user_add.html')
 

	
 
    def update(self, id):
 
        user_model = UserModel()
 
        user = user_model.get(id)
 
        _form = UserForm(edit=True, old_data={'user_id': id,
 
                                              'email': user.email})()
 
        form_result = {}
 
        try:
 
            form_result = _form.to_python(dict(request.POST))
 
            skip_attrs = ['extern_type', 'extern_name',
 
                         ] + auth_modules.get_managed_fields(user)
 

	
 
            user_model.update(id, form_result, skip_attrs=skip_attrs)
 
            usr = form_result['username']
 
            action_logger(request.authuser, 'admin_updated_user:%s' % usr,
 
                          None, request.ip_addr)
 
            h.flash(_('User updated successfully'), category='success')
 
            Session().commit()
 
        except formencode.Invalid as errors:
 
            defaults = errors.value
 
            e = errors.error_dict or {}
 
            defaults.update({
 
                'create_repo_perm': user_model.has_perm(id,
 
                                                        'hg.create.repository'),
 
                'fork_repo_perm': user_model.has_perm(id, 'hg.fork.repository'),
 
            })
 
            return htmlfill.render(
 
                self._render_edit_profile(user),
 
                defaults=defaults,
 
                errors=e,
 
                prefix_error=False,
 
                encoding="UTF-8",
 
                force_defaults=False)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('Error occurred during update of user %s')
 
                    % form_result.get('username'), category='error')
 
        raise HTTPFound(location=url('edit_user', id=id))
 

	
 
    def delete(self, id):
 
        usr = User.get_or_404(id)
 
        try:
 
            UserModel().delete(usr)
 
            Session().commit()
 
            h.flash(_('Successfully deleted user'), category='success')
 
        except (UserOwnsReposException, DefaultUserException) as e:
 
            h.flash(e, category='warning')
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('An error occurred during deletion of user'),
 
                    category='error')
 
        raise HTTPFound(location=url('users'))
 

	
 
    def _get_user_or_raise_if_default(self, id):
 
        try:
 
            return User.get_or_404(id, allow_default=False)
 
        except DefaultUserException:
 
            h.flash(_("The default user cannot be edited"), category='warning')
 
            raise HTTPNotFound
 

	
 
    def _render_edit_profile(self, user):
 
        c.user = user
 
        c.active = 'profile'
 
        c.perm_user = AuthUser(dbuser=user)
 
        managed_fields = auth_modules.get_managed_fields(user)
 
        c.readonly = lambda n: 'readonly' if n in managed_fields else None
 
        return render('admin/users/user_edit.html')
 

	
 
    def edit(self, id, format='html'):
 
        user = self._get_user_or_raise_if_default(id)
 
        defaults = user.get_dict()
 

	
 
        return htmlfill.render(
 
            self._render_edit_profile(user),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    def edit_advanced(self, id):
 
        c.user = self._get_user_or_raise_if_default(id)
 
        c.active = 'advanced'
 
        c.perm_user = AuthUser(dbuser=c.user)
 

	
 
        umodel = UserModel()
 
        defaults = c.user.get_dict()
 
        defaults.update({
 
            'create_repo_perm': umodel.has_perm(c.user, 'hg.create.repository'),
 
            'create_user_group_perm': umodel.has_perm(c.user,
 
                                                      'hg.usergroup.create.true'),
 
            'fork_repo_perm': umodel.has_perm(c.user, 'hg.fork.repository'),
 
        })
 
        return htmlfill.render(
 
            render('admin/users/user_edit.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    def edit_api_keys(self, id):
 
        c.user = self._get_user_or_raise_if_default(id)
 
        c.active = 'api_keys'
 
        show_expired = True
 
        c.lifetime_values = [
 
            (str(-1), _('Forever')),
 
            (str(5), _('5 minutes')),
 
            (str(60), _('1 hour')),
 
            (str(60 * 24), _('1 day')),
 
            (str(60 * 24 * 30), _('1 month')),
 
        ]
 
        c.lifetime_options = [(c.lifetime_values, _("Lifetime"))]
 
        c.user_api_keys = ApiKeyModel().get_api_keys(c.user.user_id,
 
                                                     show_expired=show_expired)
 
        defaults = c.user.get_dict()
 
        return htmlfill.render(
 
            render('admin/users/user_edit.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    def add_api_key(self, id):
 
        c.user = self._get_user_or_raise_if_default(id)
 

	
 
        lifetime = safe_int(request.POST.get('lifetime'), -1)
 
        description = request.POST.get('description')
 
        ApiKeyModel().create(c.user.user_id, description, lifetime)
 
        Session().commit()
 
        h.flash(_("API key successfully created"), category='success')
 
        raise HTTPFound(location=url('edit_user_api_keys', id=c.user.user_id))
 

	
 
    def delete_api_key(self, id):
 
        c.user = self._get_user_or_raise_if_default(id)
 

	
 
        api_key = request.POST.get('del_api_key')
 
        if request.POST.get('del_api_key_builtin'):
 
            c.user.api_key = generate_api_key()
 
            Session().commit()
 
            h.flash(_("API key successfully reset"), category='success')
 
        elif api_key:
 
            ApiKeyModel().delete(api_key, c.user.user_id)
 
            Session().commit()
 
            h.flash(_("API key successfully deleted"), category='success')
 

	
 
        raise HTTPFound(location=url('edit_user_api_keys', id=c.user.user_id))
 

	
 
    def update_account(self, id):
 
        pass
 

	
 
    def edit_perms(self, id):
 
        c.user = self._get_user_or_raise_if_default(id)
 
        c.active = 'perms'
 
        c.perm_user = AuthUser(dbuser=c.user)
 

	
 
        umodel = UserModel()
 
        defaults = c.user.get_dict()
 
        defaults.update({
 
            'create_repo_perm': umodel.has_perm(c.user, 'hg.create.repository'),
 
            'create_user_group_perm': umodel.has_perm(c.user,
 
                                                      'hg.usergroup.create.true'),
 
            'fork_repo_perm': umodel.has_perm(c.user, 'hg.fork.repository'),
 
        })
 
        return htmlfill.render(
 
            render('admin/users/user_edit.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    def update_perms(self, id):
 
        user = self._get_user_or_raise_if_default(id)
 

	
 
        try:
 
            form = CustomDefaultPermissionsForm()()
 
            form_result = form.to_python(request.POST)
 

	
 
            user_model = UserModel()
 

	
 
            defs = UserToPerm.query() \
 
                .filter(UserToPerm.user == user) \
 
                .all()
 
            for ug in defs:
 
                Session().delete(ug)
 

	
 
            if form_result['create_repo_perm']:
 
                user_model.grant_perm(id, 'hg.create.repository')
 
            else:
 
                user_model.grant_perm(id, 'hg.create.none')
 
            if form_result['create_user_group_perm']:
 
                user_model.grant_perm(id, 'hg.usergroup.create.true')
 
            else:
 
                user_model.grant_perm(id, 'hg.usergroup.create.false')
 
            if form_result['fork_repo_perm']:
 
                user_model.grant_perm(id, 'hg.fork.repository')
 
            else:
 
                user_model.grant_perm(id, 'hg.fork.none')
 
            h.flash(_("Updated permissions"), category='success')
 
            Session().commit()
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('An error occurred during permissions saving'),
 
                    category='error')
 
        raise HTTPFound(location=url('edit_user_perms', id=id))
 

	
 
    def edit_emails(self, id):
 
        c.user = self._get_user_or_raise_if_default(id)
 
        c.active = 'emails'
 
        c.user_email_map = UserEmailMap.query() \
 
            .filter(UserEmailMap.user == c.user).all()
 

	
 
        defaults = c.user.get_dict()
 
        return htmlfill.render(
 
            render('admin/users/user_edit.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    def add_email(self, id):
 
        user = self._get_user_or_raise_if_default(id)
 
        email = request.POST.get('new_email')
 
        user_model = UserModel()
 

	
 
        try:
 
            user_model.add_extra_email(id, email)
 
            Session().commit()
 
            h.flash(_("Added email %s to user") % email, category='success')
 
        except formencode.Invalid as error:
 
            msg = error.error_dict['email']
 
            h.flash(msg, category='error')
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('An error occurred during email saving'),
 
                    category='error')
 
        raise HTTPFound(location=url('edit_user_emails', id=id))
 

	
 
    def delete_email(self, id):
 
        user = self._get_user_or_raise_if_default(id)
 
        email_id = request.POST.get('del_email_id')
 
        user_model = UserModel()
 
        user_model.delete_extra_email(id, email_id)
 
        Session().commit()
 
        h.flash(_("Removed email from user"), category='success')
 
        raise HTTPFound(location=url('edit_user_emails', id=id))
 

	
 
    def edit_ips(self, id):
 
        c.user = self._get_user_or_raise_if_default(id)
 
        c.active = 'ips'
 
        c.user_ip_map = UserIpMap.query() \
 
            .filter(UserIpMap.user == c.user).all()
 

	
 
        c.default_user_ip_map = UserIpMap.query() \
 
            .filter(UserIpMap.user == User.get_default_user()).all()
 

	
 
        defaults = c.user.get_dict()
 
        return htmlfill.render(
 
            render('admin/users/user_edit.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    def add_ip(self, id):
 
        ip = request.POST.get('new_ip')
 
        user_model = UserModel()
 

	
 
        try:
 
            user_model.add_extra_ip(id, ip)
 
            Session().commit()
 
            h.flash(_("Added IP address %s to user whitelist") % ip, category='success')
 
        except formencode.Invalid as error:
 
            msg = error.error_dict['ip']
 
            h.flash(msg, category='error')
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('An error occurred while adding IP address'),
 
                    category='error')
 

	
 
        if 'default_user' in request.POST:
 
            raise HTTPFound(location=url('admin_permissions_ips'))
 
        raise HTTPFound(location=url('edit_user_ips', id=id))
 

	
 
    def delete_ip(self, id):
 
        ip_id = request.POST.get('del_ip_id')
 
        user_model = UserModel()
 
        user_model.delete_extra_ip(id, ip_id)
 
        Session().commit()
 
        h.flash(_("Removed IP address from user whitelist"), category='success')
 

	
 
        if 'default_user' in request.POST:
 
            raise HTTPFound(location=url('admin_permissions_ips'))
 
        raise HTTPFound(location=url('edit_user_ips', id=id))
 

	
 
    @IfSshEnabled
 
    def edit_ssh_keys(self, id):
 
        c.user = self._get_user_or_raise_if_default(id)
 
        c.active = 'ssh_keys'
 
        c.user_ssh_keys = SshKeyModel().get_ssh_keys(c.user.user_id)
 
        defaults = c.user.get_dict()
 
        return htmlfill.render(
 
            render('admin/users/user_edit.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @IfSshEnabled
 
    def ssh_keys_add(self, id):
 
        c.user = self._get_user_or_raise_if_default(id)
 

	
 
        description = request.POST.get('description')
 
        public_key = request.POST.get('public_key')
 
        try:
 
            new_ssh_key = SshKeyModel().create(c.user.user_id,
 
                                               description, public_key)
 
            Session().commit()
 
            SshKeyModel().write_authorized_keys()
 
            h.flash(_("SSH key %s successfully added") % new_ssh_key.fingerprint, category='success')
 
        except SshKeyModelException as e:
 
            h.flash(e.args[0], category='error')
 
        raise HTTPFound(location=url('edit_user_ssh_keys', id=c.user.user_id))
 

	
 
    @IfSshEnabled
 
    def ssh_keys_delete(self, id):
 
        c.user = self._get_user_or_raise_if_default(id)
 

	
 
        fingerprint = request.POST.get('del_public_key_fingerprint')
 
        try:
 
            SshKeyModel().delete(fingerprint, c.user.user_id)
 
            Session().commit()
 
            SshKeyModel().write_authorized_keys()
 
            h.flash(_("SSH key successfully deleted"), category='success')
 
        except SshKeyModelException as e:
 
            h.flash(e.args[0], category='error')
 
        raise HTTPFound(location=url('edit_user_ssh_keys', id=c.user.user_id))
kallithea/lib/auth_modules/auth_ldap.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.auth_modules.auth_ldap
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Kallithea authentication plugin for LDAP
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Created on Nov 17, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 

	
 
import logging
 

	
 
from kallithea.lib import auth_modules
 
from kallithea.lib.compat import hybrid_property
 
from kallithea.lib.exceptions import LdapConnectionError, LdapImportError, LdapPasswordError, LdapUsernameError
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 
try:
 
    import ldap
 
    import ldap.filter
 
except ImportError:
 
    # means that python-ldap is not installed
 
    ldap = None
 

	
 

	
 
class AuthLdap(object):
 

	
 
    def __init__(self, server, base_dn, port=None, bind_dn='', bind_pass='',
 
                 tls_kind='LDAPS', tls_reqcert='DEMAND', cacertdir=None, ldap_version=3,
 
                 ldap_filter='(&(objectClass=user)(!(objectClass=computer)))',
 
                 search_scope='SUBTREE', attr_login='uid'):
 
        if ldap is None:
 
            raise LdapImportError
 

	
 
        self.ldap_version = ldap_version
 

	
 
        self.TLS_KIND = tls_kind
 
        OPT_X_TLS_DEMAND = 2
 
        self.TLS_REQCERT = getattr(ldap, 'OPT_X_TLS_%s' % tls_reqcert,
 
                                   OPT_X_TLS_DEMAND)
 
        self.cacertdir = cacertdir
 

	
 
        protocol = 'ldaps' if self.TLS_KIND == 'LDAPS' else 'ldap'
 
        if not port:
 
            port = 636 if self.TLS_KIND == 'LDAPS' else 389
 
        self.LDAP_SERVER = str(', '.join(
 
            "%s://%s:%s" % (protocol,
 
                            host.strip(),
 
                            port)
 
            for host in server.split(',')))
 

	
 
        self.LDAP_BIND_DN = bind_dn
 
        self.LDAP_BIND_PASS = bind_pass
 

	
 
        self.BASE_DN = base_dn
 
        self.LDAP_FILTER = ldap_filter
 
        self.SEARCH_SCOPE = getattr(ldap, 'SCOPE_%s' % search_scope)
 
        self.attr_login = attr_login
 

	
 
    def authenticate_ldap(self, username, password):
 
        """
 
        Authenticate a user via LDAP and return his/her LDAP properties.
 

	
 
        Raises AuthenticationError if the credentials are rejected, or
 
        EnvironmentError if the LDAP server can't be reached.
 

	
 
        :param username: username
 
        :param password: password
 
        """
 

	
 
        if not password:
 
            log.debug("Attempt to authenticate LDAP user "
 
                      "with blank password rejected.")
 
            raise LdapPasswordError()
 
        if "," in username:
 
            raise LdapUsernameError("invalid character in username: ,")
 
        try:
 
            if self.cacertdir:
 
                if hasattr(ldap, 'OPT_X_TLS_CACERTDIR'):
 
                    ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, self.cacertdir)
 
                else:
 
                    log.debug("OPT_X_TLS_CACERTDIR is not available - can't set %s", self.cacertdir)
 
            ldap.set_option(ldap.OPT_REFERRALS, ldap.OPT_OFF)
 
            ldap.set_option(ldap.OPT_RESTART, ldap.OPT_ON)
 
            ldap.set_option(ldap.OPT_TIMEOUT, 20)
 
            ldap.set_option(ldap.OPT_NETWORK_TIMEOUT, 10)
 
            ldap.set_option(ldap.OPT_TIMELIMIT, 15)
 
            if self.TLS_KIND != 'PLAIN':
 
                ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, self.TLS_REQCERT)
 
            server = ldap.initialize(self.LDAP_SERVER)
 
            if self.ldap_version == 2:
 
                server.protocol = ldap.VERSION2
 
            else:
 
                server.protocol = ldap.VERSION3
 

	
 
            if self.TLS_KIND == 'START_TLS':
 
                server.start_tls_s()
 

	
 
            if self.LDAP_BIND_DN and self.LDAP_BIND_PASS:
 
                log.debug('Trying simple_bind with password and given DN: %s',
 
                          self.LDAP_BIND_DN)
 
                server.simple_bind_s(self.LDAP_BIND_DN, self.LDAP_BIND_PASS)
 

	
 
            filter_ = '(&%s(%s=%s))' % (self.LDAP_FILTER,
 
                                        ldap.filter.escape_filter_chars(self.attr_login),
 
                                        ldap.filter.escape_filter_chars(username))
 
            log.debug("Authenticating %r filter %s at %s", self.BASE_DN,
 
                      filter_, self.LDAP_SERVER)
 
            lobjects = server.search_ext_s(self.BASE_DN, self.SEARCH_SCOPE,
 
                                           filter_)
 

	
 
            if not lobjects:
 
                raise ldap.NO_SUCH_OBJECT()
 

	
 
            for (dn, _attrs) in lobjects:
 
                if dn is None:
 
                    continue
 

	
 
                try:
 
                    log.debug('Trying simple bind with %s', dn)
 
                    server.simple_bind_s(dn, password)
 
                    results = server.search_ext_s(dn, ldap.SCOPE_BASE,
 
                                                  '(objectClass=*)')
 
                    if len(results) == 1:
 
                        dn_, attrs = results[0]
 
                        assert dn_ == dn
 
                        return dn, attrs
 

	
 
                except ldap.INVALID_CREDENTIALS:
 
                    log.debug("LDAP rejected password for user '%s': %s",
 
                              username, dn)
 
                    continue # accept authentication as another ldap user with same username
 

	
 
            log.debug("No matching LDAP objects for authentication "
 
                      "of '%s'", username)
 
            raise LdapPasswordError()
 

	
 
        except ldap.NO_SUCH_OBJECT:
 
            log.debug("LDAP says no such user '%s'", username)
 
            raise LdapUsernameError()
 
        except ldap.SERVER_DOWN:
 
            # [0] might be {'info': "TLS error -8179:Peer's Certificate issuer is not recognized.", 'desc': "Can't contact LDAP server"}
 
            raise LdapConnectionError("LDAP can't connect to authentication server")
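# Illustrative sketch (not part of this changeset, hypothetical settings): how
# AuthLdap above is typically driven by the plugin below.
#
#     aldap = AuthLdap(server='ldap.example.com', base_dn='dc=example,dc=com',
#                      bind_dn='cn=search,dc=example,dc=com', bind_pass='secret')
#     dn, attrs = aldap.authenticate_ldap('jdoe', 's3cret')
#     # returns the matched DN and its attribute dict on success, or raises
#     # LdapPasswordError / LdapUsernameError / LdapConnectionError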
 

	
 

	
 
class KallitheaAuthPlugin(auth_modules.KallitheaExternalAuthPlugin):
 
    def __init__(self):
 
        self._logger = logging.getLogger(__name__)
 
        self._tls_kind_values = ["PLAIN", "LDAPS", "START_TLS"]
 
        self._tls_reqcert_values = ["NEVER", "ALLOW", "TRY", "DEMAND", "HARD"]
 
        self._search_scopes = ["BASE", "ONELEVEL", "SUBTREE"]
 

	
 
    @hybrid_property
 
    def name(self):
 
        return "ldap"
 

	
 
    def settings(self):
 
        settings = [
 
            {
 
                "name": "host",
 
                "validator": self.validators.UnicodeString(strip=True),
 
                "type": "string",
 
                "description": "Host of the LDAP Server",
 
                "formname": "LDAP Host"
 
            },
 
            {
 
                "name": "port",
 
                "validator": self.validators.Number(strip=True),
 
                "type": "string",
 
                "description": "Port that the LDAP server is listening on. Defaults to 389 for PLAIN/START_TLS and 636 for LDAPS.",
 
                "default": "",
 
                "formname": "Custom LDAP Port"
 
            },
 
            {
 
                "name": "dn_user",
 
                "validator": self.validators.UnicodeString(strip=True),
 
                "type": "string",
 
                "description": "User to connect to LDAP",
 
                "formname": "Account"
 
            },
 
            {
 
                "name": "dn_pass",
 
                "validator": self.validators.UnicodeString(strip=True),
 
                "type": "password",
 
                "description": "Password to connect to LDAP",
 
                "formname": "Password"
 
            },
 
            {
 
                "name": "tls_kind",
 
                "validator": self.validators.OneOf(self._tls_kind_values),
 
                "type": "select",
 
                "values": self._tls_kind_values,
 
                "description": "TLS Type",
 
                "default": 'LDAPS',
 
                "formname": "Connection Security"
 
            },
 
            {
 
                "name": "tls_reqcert",
 
                "validator": self.validators.OneOf(self._tls_reqcert_values),
 
                "type": "select",
 
                "values": self._tls_reqcert_values,
 
                "description": "Require Cert over TLS?",
 
                "formname": "Certificate Checks"
 
            },
 
            {
 
                "name": "cacertdir",
 
                "validator": self.validators.UnicodeString(strip=True),
 
                "type": "string",
 
                "description": "Optional: Custom CA certificate directory for validating LDAPS",
 
                "formname": "Custom CA Certificates"
 
            },
 
            {
 
                "name": "base_dn",
 
                "validator": self.validators.UnicodeString(strip=True),
 
                "type": "string",
 
                "description": "Base DN to search (e.g., dc=mydomain,dc=com)",
 
                "formname": "Base DN"
 
            },
 
            {
 
                "name": "filter",
 
                "validator": self.validators.UnicodeString(strip=True),
 
                "type": "string",
 
                "description": "Filter to narrow results (e.g., ou=Users, etc)",
 
                "formname": "LDAP Search Filter"
 
            },
 
            {
 
                "name": "search_scope",
 
                "validator": self.validators.OneOf(self._search_scopes),
 
                "type": "select",
 
                "values": self._search_scopes,
 
                "description": "How deep to search LDAP",
 
                "formname": "LDAP Search Scope"
 
            },
 
            {
 
                "name": "attr_login",
 
                "validator": self.validators.AttrLoginValidator(not_empty=True, strip=True),
 
                "type": "string",
 
                "description": "LDAP Attribute to map to user name",
 
                "formname": "Login Attribute"
 
            },
 
            {
 
                "name": "attr_firstname",
 
                "validator": self.validators.UnicodeString(strip=True),
 
                "type": "string",
 
                "description": "LDAP Attribute to map to first name",
 
                "formname": "First Name Attribute"
 
            },
 
            {
 
                "name": "attr_lastname",
 
                "validator": self.validators.UnicodeString(strip=True),
 
                "type": "string",
 
                "description": "LDAP Attribute to map to last name",
 
                "formname": "Last Name Attribute"
 
            },
 
            {
 
                "name": "attr_email",
 
                "validator": self.validators.UnicodeString(strip=True),
 
                "type": "string",
 
                "description": "LDAP Attribute to map to email address",
 
                "formname": "Email Attribute"
 
            }
 
        ]
 
        return settings
 

	
 
    def use_fake_password(self):
 
        return True
 

	
 
    def auth(self, userobj, username, password, settings, **kwargs):
 
        """
 
        Given a user object (which may be null), username, a plaintext password,
 
        and a settings object (containing all the keys needed as listed in settings()),
 
        authenticate this user's login attempt.
 

	
 
        Return None on failure. On success, return a dictionary of the form:
 

	
 
            see: KallitheaAuthPluginBase.auth_func_attrs
 
        This is later validated for correctness
 
        """
 

	
 
        if not username or not password:
 
            log.debug('Empty username or password skipping...')
 
            return None
 

	
 
        kwargs = {
 
            'server': settings.get('host', ''),
 
            'base_dn': settings.get('base_dn', ''),
 
            'port': settings.get('port'),
 
            'bind_dn': settings.get('dn_user'),
 
            'bind_pass': settings.get('dn_pass'),
 
            'tls_kind': settings.get('tls_kind'),
 
            'tls_reqcert': settings.get('tls_reqcert'),
 
            'cacertdir': settings.get('cacertdir'),
 
            'ldap_filter': settings.get('filter'),
 
            'search_scope': settings.get('search_scope'),
 
            'attr_login': settings.get('attr_login'),
 
            'ldap_version': 3,
 
        }
 

	
 
        if kwargs['bind_dn'] and not kwargs['bind_pass']:
 
            log.debug('Using dynamic binding.')
 
            kwargs['bind_dn'] = kwargs['bind_dn'].replace('$login', username)
 
            kwargs['bind_pass'] = password
 
        log.debug('Checking for ldap authentication')
 

	
 
        try:
 
            aldap = AuthLdap(**kwargs)
 
            (user_dn, ldap_attrs) = aldap.authenticate_ldap(username, password)
 
            log.debug('Got ldap DN response %s', user_dn)
 

	
 
            # (was: get_ldap_attr = lambda k: ldap_attrs.get(settings.get(k), [''])[0])
            def get_ldap_attr(k):
                return ldap_attrs.get(settings.get(k), [''])[0]
 

	
 
            # old attrs fetched from Kallithea database
 
            admin = getattr(userobj, 'admin', False)
 
            email = getattr(userobj, 'email', '')
 
            firstname = getattr(userobj, 'firstname', '')
 
            lastname = getattr(userobj, 'lastname', '')
 

	
 
            user_data = {
 
                'username': username,
 
                'firstname': get_ldap_attr('attr_firstname') or firstname,
 
                'lastname': get_ldap_attr('attr_lastname') or lastname,
 
                'groups': [],
 
                'email': get_ldap_attr('attr_email') or email,
 
                'admin': admin,
 
                'extern_name': user_dn,
 
            }
 
            log.info('user %s authenticated correctly', user_data['username'])
 
            return user_data
 

	
 
        except LdapUsernameError:
 
            log.info('Error authenticating %s with LDAP: User not found', username)
 
        except LdapPasswordError:
 
            log.info('Error authenticating %s with LDAP: Password error', username)
 
        except LdapImportError:
 
            log.error('Error authenticating %s with LDAP: LDAP not available', username)
 
        return None
 

	
 
    def get_managed_fields(self):
 
        return ['username', 'firstname', 'lastname', 'email', 'password']
kallithea/lib/celerylib/tasks.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.celerylib.tasks
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Kallithea task modules, containing all tasks that are supposed to be run
by the celery daemon
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Oct 6, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import email.utils
 
import os
 
import traceback
 
from collections import OrderedDict
 
from operator import itemgetter
 
from time import mktime
 

	
 
import celery.utils.log
 
from tg import config
 

	
 
import kallithea
 
from kallithea.lib import celerylib, ext_json
 
from kallithea.lib.helpers import person
 
from kallithea.lib.hooks import log_create_repository
 
from kallithea.lib.rcmail.smtp_mailer import SmtpMailer
 
from kallithea.lib.utils import action_logger
 
from kallithea.lib.utils2 import ascii_bytes, str2bool
 
from kallithea.lib.vcs.utils import author_email
 
from kallithea.model.db import RepoGroup, Repository, Statistics, User
 

	
 

	
 
__all__ = ['whoosh_index', 'get_commits_stats', 'send_email']
 

	
 

	
 
log = celery.utils.log.get_task_logger(__name__)
 

	
 

	
 
@celerylib.task
 
@celerylib.locked_task
 
@celerylib.dbsession
 
def whoosh_index(repo_location, full_index):
 
    from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
 
    celerylib.get_session() # initialize database connection
 

	
 
    index_location = config['index_dir']
 
    WhooshIndexingDaemon(index_location=index_location,
 
                         repo_location=repo_location) \
 
                         .run(full_index=full_index)
 

	
 

	
 
# for js data compatibility cleans the key for person from '
 
def akc(k):
 
    return person(k).replace('"', '')
 

	
 

	
 
@celerylib.task
 
@celerylib.dbsession
 
def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
 
    DBS = celerylib.get_session()
 
    lockkey = celerylib.__get_lockkey('get_commits_stats', repo_name, ts_min_y,
 
                            ts_max_y)
 
    lockkey_path = config.get('cache_dir') or config['app_conf']['cache_dir']  # Backward compatibility for TurboGears < 2.4
 

	
 
    log.info('running task with lockkey %s', lockkey)
 

	
 
    try:
 
        lock = celerylib.DaemonLock(os.path.join(lockkey_path, lockkey))
 

	
 
        # (was: akc = lambda k: person(k).replace('"', "") - now the module-level def akc above)
 

	
 
        co_day_auth_aggr = {}
 
        commits_by_day_aggregate = {}
 
        repo = Repository.get_by_repo_name(repo_name)
 
        if repo is None:
 
            return True
 

	
 
        repo = repo.scm_instance
 
        repo_size = repo.count()
 
        # return if repo have no revisions
 
        if repo_size < 1:
 
            lock.release()
 
            return True
 

	
 
        skip_date_limit = True
 
        parse_limit = int(config.get('commit_parse_limit'))
 
        last_rev = None
 
        last_cs = None
 
        timegetter = itemgetter('time')
 

	
 
        dbrepo = DBS.query(Repository) \
 
            .filter(Repository.repo_name == repo_name).scalar()
 
        cur_stats = DBS.query(Statistics) \
 
            .filter(Statistics.repository == dbrepo).scalar()
 

	
 
        if cur_stats is not None:
 
            last_rev = cur_stats.stat_on_revision
 

	
 
        if last_rev == repo.get_changeset().revision and repo_size > 1:
 
            # pass silently without any work if the current parsing state
            # (the revision marker from the db) is already at the last revision
            # and the repository has more than one revision
 
            lock.release()
 
            return True
 

	
 
        if cur_stats:
 
            commits_by_day_aggregate = OrderedDict(ext_json.loads(
 
                                        cur_stats.commit_activity_combined))
 
            co_day_auth_aggr = ext_json.loads(cur_stats.commit_activity)
 

	
 
        log.debug('starting parsing %s', parse_limit)
 

	
 
        last_rev = last_rev + 1 if last_rev and last_rev >= 0 else 0
 
        log.debug('Getting revisions from %s to %s',
 
             last_rev, last_rev + parse_limit
 
        )
 
        for cs in repo[last_rev:last_rev + parse_limit]:
 
            log.debug('parsing %s', cs)
 
            last_cs = cs  # remember last parsed changeset
 
            tt = cs.date.timetuple()
 
            k = mktime(tt[:3] + (0, 0, 0, 0, 0, 0))
 

	
 
            if akc(cs.author) in co_day_auth_aggr:
 
                try:
 
                    l = [timegetter(x) for x in
 
                         co_day_auth_aggr[akc(cs.author)]['data']]
 
                    time_pos = l.index(k)
 
                except ValueError:
 
                    time_pos = None
 

	
 
                if time_pos is not None and time_pos >= 0:
 
                    datadict = \
 
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]
 

	
 
                    datadict["commits"] += 1
 
                    datadict["added"] += len(cs.added)
 
                    datadict["changed"] += len(cs.changed)
 
                    datadict["removed"] += len(cs.removed)
 

	
 
                else:
 
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
 

	
 
                        datadict = {"time": k,
 
                                    "commits": 1,
 
                                    "added": len(cs.added),
 
                                    "changed": len(cs.changed),
 
                                    "removed": len(cs.removed),
 
                                   }
 
                        co_day_auth_aggr[akc(cs.author)]['data'] \
 
                            .append(datadict)
 

	
 
            else:
 
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
 
                    co_day_auth_aggr[akc(cs.author)] = {
 
                                        "label": akc(cs.author),
 
                                        "data": [{"time": k,
 
                                                 "commits": 1,
 
                                                 "added": len(cs.added),
 
                                                 "changed": len(cs.changed),
 
                                                 "removed": len(cs.removed),
 
                                                 }],
 
                                        "schema": ["commits"],
 
                                        }
 

	
 
            # gather all data by day
 
            if k in commits_by_day_aggregate:
 
                commits_by_day_aggregate[k] += 1
 
            else:
 
                commits_by_day_aggregate[k] = 1
 

	
 
        overview_data = sorted(commits_by_day_aggregate.items(),
 
                               key=itemgetter(0))
 

	
 
        if not co_day_auth_aggr:
 
            co_day_auth_aggr[akc(repo.contact)] = {
 
                "label": akc(repo.contact),
 
                "data": [0, 1],
 
                "schema": ["commits"],
 
            }
 

	
 
        stats = cur_stats if cur_stats else Statistics()
 
        stats.commit_activity = ascii_bytes(ext_json.dumps(co_day_auth_aggr))
 
        stats.commit_activity_combined = ascii_bytes(ext_json.dumps(overview_data))
 

	
 
        log.debug('last revision %s', last_rev)
 
        leftovers = len(repo.revisions[last_rev:])
 
        log.debug('revisions to parse %s', leftovers)
 

	
 
        if last_rev == 0 or leftovers < parse_limit:
 
            log.debug('getting code trending stats')
 
            stats.languages = ascii_bytes(ext_json.dumps(__get_codes_stats(repo_name)))
 

	
 
        try:
 
            stats.repository = dbrepo
 
            stats.stat_on_revision = last_cs.revision if last_cs else 0
 
            DBS.add(stats)
 
            DBS.commit()
 
        except:
 
            log.error(traceback.format_exc())
 
            DBS.rollback()
 
            lock.release()
 
            return False
 

	
 
        # final release
 
        lock.release()
 

	
 
        # execute another task if celery is enabled
 
        if len(repo.revisions) > 1 and kallithea.CELERY_APP and recurse_limit > 0:
 
            get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit - 1)
 
        elif recurse_limit <= 0:
 
            log.debug('Not recursing - limit has been reached')
 
        else:
 
            log.debug('Not recursing')
 
    except celerylib.LockHeld:
 
        log.info('Task with key %s already running', lockkey)
 
        return 'Task with key %s already running' % lockkey
 

	
 

	
 
@celerylib.task
 
@celerylib.dbsession
 
def send_email(recipients, subject, body='', html_body='', headers=None, author=None):
 
    """
 
    Sends an email with defined parameters from the .ini files.
 

	
 
    :param recipients: list of recipients; if this is an empty list, the email
        address defined in 'email_to' and the addresses of all admins are used instead
 
    :param subject: subject of the mail
 
    :param body: body of the mail
 
    :param html_body: html version of body
 
    :param headers: dictionary of prepopulated e-mail headers
 
    :param author: User object of the author of this mail, if known and relevant
 
    """
 
    assert isinstance(recipients, list), recipients
 
    if headers is None:
 
        headers = {}
 
    else:
 
        # do not modify the original headers object passed by the caller
 
        headers = headers.copy()
 

	
 
    email_config = config
 
    email_prefix = email_config.get('email_prefix', '')
 
    if email_prefix:
 
        subject = "%s %s" % (email_prefix, subject)
 

	
 
    if not recipients:
 
        # if recipients are not defined we send to email_config + all admins
 
        recipients = [u.email for u in User.query()
 
                      .filter(User.admin == True).all()]
 
        if email_config.get('email_to') is not None:
 
            recipients += email_config.get('email_to').split(',')
 

	
 
        # If there are still no recipients, there are no admins and no address
 
        # configured in email_to, so return.
 
        if not recipients:
 
            log.error("No recipients specified and no fallback available.")
 
            return False
 

	
 
        log.warning("No recipients specified for '%s' - sending to admins %s", subject, ' '.join(recipients))
 

	
 
    # SMTP sender
 
    envelope_from = email_config.get('app_email_from', 'Kallithea')
 
    # 'From' header
 
    if author is not None:
 
        # set From header based on author but with a generic e-mail address
 
        # In case app_email_from is in "Some Name <e-mail>" format, we first
 
        # extract the e-mail address.
 
        envelope_addr = author_email(envelope_from)
 
        headers['From'] = '"%s" <%s>' % (
 
            email.utils.quote('%s (no-reply)' % author.full_name_or_username),
 
            envelope_addr)
 

	
 
    user = email_config.get('smtp_username')
 
    passwd = email_config.get('smtp_password')
 
    mail_server = email_config.get('smtp_server')
 
    mail_port = email_config.get('smtp_port')
 
    tls = str2bool(email_config.get('smtp_use_tls'))
 
    ssl = str2bool(email_config.get('smtp_use_ssl'))
 
    debug = str2bool(email_config.get('debug'))
 
    smtp_auth = email_config.get('smtp_auth')
 

	
 
    logmsg = ("Mail details:\n"
 
              "recipients: %s\n"
 
              "headers: %s\n"
 
              "subject: %s\n"
 
              "body:\n%s\n"
 
              "html:\n%s\n"
 
              % (' '.join(recipients), headers, subject, body, html_body))
 

	
 
    if mail_server:
 
        log.debug("Sending e-mail. " + logmsg)
 
    else:
 
        log.error("SMTP mail server not configured - cannot send e-mail.")
 
        log.warning(logmsg)
 
        return False
 

	
 
    try:
 
        m = SmtpMailer(envelope_from, user, passwd, mail_server, smtp_auth,
 
                       mail_port, ssl, tls, debug=debug)
 
        m.send(recipients, subject, body, html_body, headers=headers)
 
    except:
 
        log.error('Mail sending failed')
 
        log.error(traceback.format_exc())
 
        return False
 
    return True
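# Illustrative sketch (hypothetical addresses): a direct call to the task above.
# Note that recipients must be a list; an empty list falls back to email_to and
# all admins.
#
#     send_email(['admin@example.com'], 'Test subject',
#                body='plain text body', html_body='<b>html body</b>',
#                headers={'X-Kallithea-Test': '1'})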
 

	
 

	
 
@celerylib.task
 
@celerylib.dbsession
 
def create_repo(form_data, cur_user):
 
    from kallithea.model.repo import RepoModel
 
    from kallithea.model.db import Setting
 

	
 
    DBS = celerylib.get_session()
 

	
 
    cur_user = User.guess_instance(cur_user)
 

	
 
    owner = cur_user
 
    repo_name = form_data['repo_name']
 
    repo_name_full = form_data['repo_name_full']
 
    repo_type = form_data['repo_type']
 
    description = form_data['repo_description']
 
    private = form_data['repo_private']
 
    clone_uri = form_data.get('clone_uri')
 
    repo_group = form_data['repo_group']
 
    landing_rev = form_data['repo_landing_rev']
 
    copy_fork_permissions = form_data.get('copy_permissions')
 
    copy_group_permissions = form_data.get('repo_copy_permissions')
 
    fork_of = form_data.get('fork_parent_id')
 
    state = form_data.get('repo_state', Repository.STATE_PENDING)
 

	
 
    # repo creation defaults, private and repo_type are filled in form
 
    defs = Setting.get_default_repo_settings(strip_prefix=True)
 
    enable_statistics = defs.get('repo_enable_statistics')
 
    enable_downloads = defs.get('repo_enable_downloads')
 

	
 
    try:
 
        repo = RepoModel()._create_repo(
 
            repo_name=repo_name_full,
 
            repo_type=repo_type,
 
            description=description,
 
            owner=owner,
 
            private=private,
 
            clone_uri=clone_uri,
 
            repo_group=repo_group,
 
            landing_rev=landing_rev,
 
            fork_of=fork_of,
 
            copy_fork_permissions=copy_fork_permissions,
 
            copy_group_permissions=copy_group_permissions,
 
            enable_statistics=enable_statistics,
 
            enable_downloads=enable_downloads,
 
            state=state
 
        )
 

	
 
        action_logger(cur_user, 'user_created_repo',
 
                      form_data['repo_name_full'], '')
 

	
 
        DBS.commit()
 
        # now create this repo on Filesystem
 
        RepoModel()._create_filesystem_repo(
 
            repo_name=repo_name,
 
            repo_type=repo_type,
 
            repo_group=RepoGroup.guess_instance(repo_group),
 
            clone_uri=clone_uri,
 
        )
 
        repo = Repository.get_by_repo_name(repo_name_full)
 
        log_create_repository(repo.get_dict(), created_by=owner.username)
 

	
 
        # update repo changeset caches initially
 
        repo.update_changeset_cache()
 

	
 
        # set new created state
 
        repo.set_state(Repository.STATE_CREATED)
 
        DBS.commit()
 
    except Exception as e:
 
        log.warning('Exception %s occurred when creating repository, '
 
                    'doing cleanup...' % e)
 
        # rollback things manually !
 
        repo = Repository.get_by_repo_name(repo_name_full)
 
        if repo:
 
            Repository.delete(repo.repo_id)
 
            DBS.commit()
 
            RepoModel()._delete_filesystem_repo(repo)
 
        raise
 

	
 
    return True
 

	
 

	
 
@celerylib.task
 
@celerylib.dbsession
 
def create_repo_fork(form_data, cur_user):
 
    """
 
    Creates a fork of a repository using internal VCS methods
 

	
 
    :param form_data:
 
    :param cur_user:
 
    """
 
    from kallithea.model.repo import RepoModel
 

	
 
    DBS = celerylib.get_session()
 

	
 
    base_path = Repository.base_path()
 
    cur_user = User.guess_instance(cur_user)
 

	
 
    repo_name = form_data['repo_name']  # fork in this case
 
    repo_name_full = form_data['repo_name_full']
 

	
 
    repo_type = form_data['repo_type']
 
    owner = cur_user
 
    private = form_data['private']
 
    clone_uri = form_data.get('clone_uri')
 
    repo_group = form_data['repo_group']
 
    landing_rev = form_data['landing_rev']
 
    copy_fork_permissions = form_data.get('copy_permissions')
 

	
 
    try:
 
        fork_of = Repository.guess_instance(form_data.get('fork_parent_id'))
 

	
 
        RepoModel()._create_repo(
 
            repo_name=repo_name_full,
 
            repo_type=repo_type,
 
            description=form_data['description'],
 
            owner=owner,
 
            private=private,
 
            clone_uri=clone_uri,
 
            repo_group=repo_group,
 
            landing_rev=landing_rev,
 
            fork_of=fork_of,
 
            copy_fork_permissions=copy_fork_permissions
 
        )
 
        action_logger(cur_user, 'user_forked_repo:%s' % repo_name_full,
 
                      fork_of.repo_name, '')
 
        DBS.commit()
 

	
 
        source_repo_path = os.path.join(base_path, fork_of.repo_name)
 

	
 
        # now create this repo on Filesystem
 
        RepoModel()._create_filesystem_repo(
 
            repo_name=repo_name,
 
            repo_type=repo_type,
 
            repo_group=RepoGroup.guess_instance(repo_group),
 
            clone_uri=source_repo_path,
 
        )
 
        repo = Repository.get_by_repo_name(repo_name_full)
 
        log_create_repository(repo.get_dict(), created_by=owner.username)
 

	
 
        # update repo changeset caches initially
 
        repo.update_changeset_cache()
 

	
 
        # set new created state
 
        repo.set_state(Repository.STATE_CREATED)
 
        DBS.commit()
 
    except Exception as e:
 
        log.warning('Exception %s occurred when forking repository, '
 
                    'doing cleanup...' % e)
 
        # rollback things manually !
 
        repo = Repository.get_by_repo_name(repo_name_full)
 
        if repo:
 
            Repository.delete(repo.repo_id)
 
            DBS.commit()
 
            RepoModel()._delete_filesystem_repo(repo)
 
        raise
 

	
 
    return True
 

	
 

	
 
def __get_codes_stats(repo_name):
 
    from kallithea.config.conf import LANGUAGES_EXTENSIONS_MAP
 
    repo = Repository.get_by_repo_name(repo_name).scm_instance
 

	
 
    tip = repo.get_changeset()
 
    code_stats = {}
 

	
 
    for _topnode, _dirnodes, filenodes in tip.walk('/'):
 
        for filenode in filenodes:
 
            ext = filenode.extension.lower()
 
            if ext in LANGUAGES_EXTENSIONS_MAP and not filenode.is_binary:
 
                if ext in code_stats:
 
                    code_stats[ext] += 1
 
                else:
 
                    code_stats[ext] = 1
 

	
 
    return code_stats or {}
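# Illustrative note: __get_codes_stats returns a plain mapping of lowercased
# file extensions to file counts, e.g. (hypothetical) {'py': 120, 'js': 14}.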
kallithea/lib/helpers.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
Helper functions
 

	
 
Consists of functions typically used within templates, but also
available to Controllers. This module is available to both as 'h'.
 
"""
 
import hashlib
 
import json
 
import logging
 
import random
 
import re
 
import textwrap
 
import urllib.parse
 

	
 
from beaker.cache import cache_region
 
from pygments import highlight as code_highlight
 
from pygments.formatters.html import HtmlFormatter
 
from tg.i18n import ugettext as _
 
from webhelpers2.html import HTML, escape, literal
 
from webhelpers2.html.tags import NotGiven, Option, Options, _input, _make_safe_id_component, checkbox, end_form
 
from webhelpers2.html.tags import form as insecure_form
 
from webhelpers2.html.tags import hidden, link_to, password, radio
 
from webhelpers2.html.tags import select as webhelpers2_select
 
from webhelpers2.html.tags import submit, text, textarea
 
from webhelpers2.number import format_byte_size
 
from webhelpers2.text import chop_at, truncate, wrap_paragraphs
 

	
 
from kallithea.config.routing import url
 
from kallithea.lib.annotate import annotate_highlight
 
#==============================================================================
 
# PERMS
 
#==============================================================================
 
from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoPermissionLevel
 
from kallithea.lib.markup_renderer import url_re
 
from kallithea.lib.pygmentsutils import get_custom_lexer
 
from kallithea.lib.utils2 import MENTIONS_REGEX, AttributeDict
 
from kallithea.lib.utils2 import age as _age
 
from kallithea.lib.utils2 import credentials_filter, safe_bytes, safe_int, safe_str, str2bool, time_to_datetime
 
from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
 
from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError
 
#==============================================================================
 
# SCM FILTERS available via h.
 
#==============================================================================
 
from kallithea.lib.vcs.utils import author_email, author_name
 

	
 

	
 
# mute pyflakes "imported but unused"
 
assert Option
 
assert checkbox
 
assert end_form
 
assert password
 
assert radio
 
assert submit
 
assert text
 
assert textarea
 
assert format_byte_size
 
assert chop_at
 
assert wrap_paragraphs
 
assert HasPermissionAny
 
assert HasRepoGroupPermissionLevel
 
assert HasRepoPermissionLevel
 
assert time_to_datetime
 
assert EmptyChangeset
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def canonical_url(*args, **kargs):
 
    '''Like url(x, qualified=True), but returns a URL that not only is qualified
    but also canonical, as configured in the canonical_url setting'''
 
    from kallithea import CONFIG
 
    try:
 
        parts = CONFIG.get('canonical_url', '').split('://', 1)
 
        kargs['host'] = parts[1]
 
        kargs['protocol'] = parts[0]
 
    except IndexError:
 
        kargs['qualified'] = True
 
    return url(*args, **kargs)
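# Illustrative sketch (assuming a hypothetical canonical_url = https://kallithea.example.com
# in the configuration): canonical_url('home') then produces an absolute URL on
# that host and protocol, regardless of the host the current request was served on.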
 

	
 

	
 
def canonical_hostname():
 
    '''Return canonical hostname of system'''
 
    from kallithea import CONFIG
 
    try:
 
        parts = CONFIG.get('canonical_url', '').split('://', 1)
 
        return parts[1].split('/', 1)[0]
 
    except IndexError:
 
        parts = url('home', qualified=True).split('://', 1)
 
        return parts[1].split('/', 1)[0]
 

	
 

	
 
def html_escape(s):
 
    """Return string with all html escaped.
 
    This is also safe for javascript in html but not necessarily correct.
 
    """
 
    return (s
 
        .replace('&', '&amp;')
 
        .replace(">", "&gt;")
 
        .replace("<", "&lt;")
 
        .replace('"', "&quot;")
 
        .replace("'", "&apos;") # Note: this is HTML5 not HTML4 and might not work in mails
 
        )
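# Example (illustrative):
#     >>> html_escape('<b>&"\'')
#     '&lt;b&gt;&amp;&quot;&apos;'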
 

	
 
def js(value):
 
    """Convert Python value to the corresponding JavaScript representation.
 

	
 
    This is necessary to safely insert arbitrary values into HTML <script>
 
    sections e.g. using Mako template expression substitution.
 

	
 
    Note: Rather than using this function, it's preferable to avoid the
 
    insertion of values into HTML <script> sections altogether. Instead,
 
    data should (to the extent possible) be passed to JavaScript using
 
    data attributes or AJAX calls, eliminating the need for JS specific
 
    escaping.
 

	
 
    Note: This is not safe for use in attributes (e.g. onclick), because
 
    quotes are not escaped.
 

	
 
    Because the rules for parsing <script> varies between XHTML (where
 
    normal rules apply for any special characters) and HTML (where
 
    entities are not interpreted, but the literal string "</script>"
 
    is forbidden), the function ensures that the result never contains
 
    '&', '<' and '>', thus making it safe in both those contexts (but
 
    not in attributes).
 
    """
 
    return literal(
 
        ('(' + json.dumps(value) + ')')
 
        # In JSON, the following can only appear in string literals.
 
        .replace('&', r'\x26')
 
        .replace('<', r'\x3c')
 
        .replace('>', r'\x3e')
 
    )
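# Example (illustrative): js('</script>') yields the literal ("\x3c/script\x3e"),
# i.e. the JSON string wrapped in parentheses with '<' and '>' escaped, so it
# never terminates an enclosing <script> block.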
 

	
 

	
 
def jshtml(val):
 
    """HTML escapes a string value, then converts the resulting string
 
    to its corresponding JavaScript representation (see `js`).
 

	
 
    This is used when a plain-text string (possibly containing special
 
    HTML characters) will be used by a script in an HTML context (e.g.
 
    element.innerHTML or jQuery's 'html' method).
 

	
 
    If in doubt, err on the side of using `jshtml` over `js`, since it's
 
    better to escape too much than too little.
 
    """
 
    return js(escape(val))
 

	
 

	
 
def shorter(s, size=20, firstline=False, postfix='...'):
 
    """Truncate s to size, including the postfix string if truncating.
 
    If firstline, truncate at newline.
 
    """
 
    if firstline:
 
        s = s.split('\n', 1)[0].rstrip()
 
    if len(s) > size:
 
        return s[:size - len(postfix)] + postfix
 
    return s
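# Example (illustrative):
#     >>> shorter('abcdefghijklmnop', size=10)
#     'abcdefg...'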
 

	
 

	
 
def reset(name, value, id=NotGiven, **attrs):
 
    """Create a reset button, similar to webhelpers2.html.tags.submit ."""
 
    return _input("reset", name, value, id, attrs)
 

	
 

	
 
def select(name, selected_values, options, id=NotGiven, **attrs):
 
    """Convenient wrapper of webhelpers2 to let it accept options as a tuple list"""
 
    if isinstance(options, list):
 
        option_list = options
 
        # Handle old value,label lists ... where value also can be value,label lists
 
        options = Options()
 
        for x in option_list:
 
            if isinstance(x, tuple) and len(x) == 2:
 
                value, label = x
 
            elif isinstance(x, str):
 
                value = label = x
 
            else:
 
                log.error('invalid select option %r', x)
 
                raise
 
            if isinstance(value, list):
 
                og = options.add_optgroup(label)
 
                for x in value:
 
                    if isinstance(x, tuple) and len(x) == 2:
 
                        group_value, group_label = x
 
                    elif isinstance(x, str):
 
                        group_value = group_label = x
 
                    else:
 
                        log.error('invalid select option %r', x)
 
                        raise
 
                    og.add_option(group_label, group_value)
 
            else:
 
                options.add_option(label, value)
 
    return webhelpers2_select(name, selected_values, options, id=id, **attrs)
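# Illustrative sketch (hypothetical data): the legacy (value, label) tuple-list
# form accepted above, including an optgroup where the value is itself a list.
#
#     select('branch', 'default',
#            [('default', 'default'),
#             ([('stable', 'stable'), ('dev', 'dev')], 'Other branches')])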
 

	
 

	
 
safeid = _make_safe_id_component
 

	
 

	
 
def FID(raw_id, path):
 
    """
 
    Creates a unique ID for a filenode, based on a hash of its path and revision;
    it's safe to use in URLs
 

	
 
    :param raw_id:
 
    :param path:
 
    """
 

	
 
    return 'C-%s-%s' % (short_id(raw_id), hashlib.md5(safe_bytes(path)).hexdigest()[:12])
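# Example (illustrative, hypothetical values): FID('9fae2dca5961', 'docs/readme.rst')
# yields something like 'C-9fae2dca5961-xxxxxxxxxxxx', where the last part is the
# first 12 hex digits of the md5 of the path.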
 

	
 

	
 
class _FilesBreadCrumbs(object):
 

	
 
    def __call__(self, repo_name, rev, paths):
 
        url_l = [link_to(repo_name, url('files_home',
 
                                        repo_name=repo_name,
 
                                        revision=rev, f_path=''),
 
                         class_='ypjax-link')]
 
        paths_l = paths.split('/')
 
        for cnt, p in enumerate(paths_l):
 
            if p != '':
 
                url_l.append(link_to(p,
 
                                     url('files_home',
 
                                         repo_name=repo_name,
 
                                         revision=rev,
 
                                         f_path='/'.join(paths_l[:cnt + 1])
 
                                         ),
 
                                     class_='ypjax-link'
 
                                     )
 
                             )
 

	
 
        return literal('/'.join(url_l))
 

	
 

	
 
files_breadcrumbs = _FilesBreadCrumbs()
 

	
 

	
 
class CodeHtmlFormatter(HtmlFormatter):
 
    """
 
    Custom HTML formatter for source code
 
    """
 

	
 
    def wrap(self, source, outfile):
 
        return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
 

	
 
    def _wrap_code(self, source):
 
        for cnt, it in enumerate(source):
 
            i, t = it
 
            t = '<span id="L%s">%s</span>' % (cnt + 1, t)
 
            yield i, t
 

	
 
    def _wrap_tablelinenos(self, inner):
 
        inner_lines = []
 
        lncount = 0
 
        for t, line in inner:
 
            if t:
 
                lncount += 1
 
            inner_lines.append(line)
 

	
 
        fl = self.linenostart
 
        mw = len(str(lncount + fl - 1))
 
        sp = self.linenospecial
 
        st = self.linenostep
 
        la = self.lineanchors
 
        aln = self.anchorlinenos
 
        nocls = self.noclasses
 
        if sp:
 
            lines = []
 

	
 
            for i in range(fl, fl + lncount):
 
                if i % st == 0:
 
                    if i % sp == 0:
 
                        if aln:
 
                            lines.append('<a href="#%s%d" class="special">%*d</a>' %
 
                                         (la, i, mw, i))
 
                        else:
 
                            lines.append('<span class="special">%*d</span>' % (mw, i))
 
                    else:
 
                        if aln:
 
                            lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
 
                        else:
 
                            lines.append('%*d' % (mw, i))
 
                else:
 
                    lines.append('')
 
            ls = '\n'.join(lines)
 
        else:
 
            lines = []
 
            for i in range(fl, fl + lncount):
 
                if i % st == 0:
 
                    if aln:
 
                        lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
 
                    else:
 
                        lines.append('%*d' % (mw, i))
 
                else:
 
                    lines.append('')
 
            ls = '\n'.join(lines)
 

	
 
        # in case you wonder about the seemingly redundant <div> here: since the
 
        # content in the other cell also is wrapped in a div, some browsers in
 
        # some configurations seem to mess up the formatting...
 
        if nocls:
 
            yield 0, ('<table class="%stable">' % self.cssclass +
 
                      '<tr><td><div class="linenodiv">'
 
                      '<pre>' + ls + '</pre></div></td>'
 
                      '<td id="hlcode" class="code">')
 
        else:
 
            yield 0, ('<table class="%stable">' % self.cssclass +
 
                      '<tr><td class="linenos"><div class="linenodiv">'
 
                      '<pre>' + ls + '</pre></div></td>'
 
                      '<td id="hlcode" class="code">')
 
        yield 0, ''.join(inner_lines)
 
        yield 0, '</td></tr></table>'
 

	
 

	
 
_whitespace_re = re.compile(r'(\t)|( )(?=\n|</div>)')
 

	
 

	
 
def _markup_whitespace(m):
 
    groups = m.groups()
 
    if groups[0]:
 
        return '<u>\t</u>'
 
    if groups[1]:
 
        return ' <i></i>'
 

	
 

	
 
def markup_whitespace(s):
 
    return _whitespace_re.sub(_markup_whitespace, s)
 

	
 

	
 
def pygmentize(filenode, **kwargs):
 
    """
 
    pygmentize function using pygments
 

	
 
    :param filenode:
 
    """
 
    lexer = get_custom_lexer(filenode.extension) or filenode.lexer
 
    return literal(markup_whitespace(
 
        code_highlight(safe_str(filenode.content), lexer, CodeHtmlFormatter(**kwargs))))
 

	
 

	
 
def hsv_to_rgb(h, s, v):
 
    if s == 0.0:
 
        return v, v, v
 
    i = int(h * 6.0)  # XXX assume int() truncates!
 
    f = (h * 6.0) - i
 
    p = v * (1.0 - s)
 
    q = v * (1.0 - s * f)
 
    t = v * (1.0 - s * (1.0 - f))
 
    i = i % 6
 
    if i == 0:
 
        return v, t, p
 
    if i == 1:
 
        return q, v, p
 
    if i == 2:
 
        return p, v, t
 
    if i == 3:
 
        return p, q, v
 
    if i == 4:
 
        return t, p, v
 
    if i == 5:
 
        return v, p, q
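# Example (illustrative): hsv_to_rgb(0.0, 1.0, 1.0) returns (1.0, 0.0, 0.0),
# i.e. pure red; hsv_to_rgb(h, 0.0, v) returns the gray (v, v, v).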
 

	
 

	
 
def gen_color(n=10000):
 
    """generator for getting n of evenly distributed colors using
 
    hsv color and golden ratio. It always return same order of colors
 

	
 
    :returns: RGB tuple
 
    """
 

	
 
    golden_ratio = 0.618033988749895
 
    h = 0.22717784590367374
 

	
 
    for _unused in range(n):
 
        h += golden_ratio
 
        h %= 1
 
        HSV_tuple = [h, 0.95, 0.95]
 
        RGB_tuple = hsv_to_rgb(*HSV_tuple)
 
        yield [str(int(x * 256)) for x in RGB_tuple]
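# Illustrative usage: the generator is deterministic, so consumers can rely on
# getting the same color for the same index.
#
#     cgen = gen_color()
#     first_three = [next(cgen) for _ in range(3)]   # three ['r', 'g', 'b'] string triples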
 

	
 

	
 
def pygmentize_annotation(repo_name, filenode, **kwargs):
 
    """
 
    pygmentize function for annotation
 

	
 
    :param filenode:
 
    """
 
    cgenerator = gen_color()
 
    color_dict = {}
 

	
 
    def get_color_string(cs):
 
        if cs in color_dict:
 
            col = color_dict[cs]
 
        else:
 
            col = color_dict[cs] = next(cgenerator)
 
        return "color: rgb(%s)! important;" % (', '.join(col))
 

	
 
    def url_func(changeset):
 
        author = escape(changeset.author)
 
        date = changeset.date
 
        message = escape(changeset.message)
 
        tooltip_html = ("<b>Author:</b> %s<br/>"
 
                        "<b>Date:</b> %s</b><br/>"
 
                        "<b>Message:</b> %s") % (author, date, message)
 

	
 
        lnk_format = show_id(changeset)
 
        uri = link_to(
 
                lnk_format,
 
                url('changeset_home', repo_name=repo_name,
 
                    revision=changeset.raw_id),
 
                style=get_color_string(changeset.raw_id),
 
                **{'data-toggle': 'popover',
 
                   'data-content': tooltip_html}
 
              )
 

	
 
        uri += '\n'
 
        return uri
 

	
 
    return literal(markup_whitespace(annotate_highlight(filenode, url_func, **kwargs)))
 

	
 

	
 
class _Message(object):
 
    """A message returned by ``pop_flash_messages()``.
 

	
 
    Converting the message to a string returns the message text. Instances
 
    also have the following attributes:
 

	
 
    * ``category``: the category specified when the message was created.
 
    * ``message``: the html-safe message text.
 
    """
 

	
 
    def __init__(self, category, message):
 
        self.category = category
 
        self.message = message
 

	
 

	
 
def _session_flash_messages(append=None, clear=False):
 
    """Manage a message queue in tg.session: return the current message queue
 
    after appending the given message, and possibly clearing the queue."""
 
    key = 'flash'
 
    from tg import session
 
    if key in session:
 
        flash_messages = session[key]
 
    else:
 
        if append is None:  # common fast path - also used for clearing empty queue
 
            return []  # don't bother saving
 
        flash_messages = []
 
        session[key] = flash_messages
 
    if append is not None and append not in flash_messages:
 
        flash_messages.append(append)
 
    if clear:
 
        session.pop(key, None)
 
    session.save()
 
    return flash_messages
 

	
 

	
 
def flash(message, category, logf=None):
 
    """
 
    Show a message to the user _and_ log it through the specified function
 

	
 
    category: notice (default), warning, error, success
 
    logf: a custom log function - such as log.debug
 

	
 
    logf defaults to log.info, unless category equals 'success', in which
 
    case logf defaults to log.debug.
 
    """
 
    assert category in ('error', 'success', 'warning'), category
 
    if hasattr(message, '__html__'):
 
        # render to HTML for storing in cookie
 
        safe_message = str(message)
 
    else:
 
        # Apply str - the message might be an exception with __str__
 
        # Escape, so we can trust the result without further escaping, without any risk of injection
 
        safe_message = html_escape(str(message))
 
    if logf is None:
 
        logf = log.info
 
        if category == 'success':
 
            logf = log.debug
 

	
 
    logf('Flash %s: %s', category, safe_message)
 

	
 
    _session_flash_messages(append=(category, safe_message))
 

	
 

	
 
def pop_flash_messages():
 
    """Return all accumulated messages and delete them from the session.
 

	
 
    The return value is a list of ``Message`` objects.
 
    """
 
    return [_Message(category, message) for category, message in _session_flash_messages(clear=True)]
 

	
 

	
 
# (was: age = lambda x, y=False: _age(x, y))
# (was: capitalize = lambda x: x.capitalize())
 
def age(x, y=False):
 
    return _age(x, y)
 

	
 
def capitalize(x):
 
    return x.capitalize()
 

	
 
email = author_email
 
# (was: short_id = lambda x: x[:12])
# (was: hide_credentials = lambda x: ''.join(credentials_filter(x)))
 

	
 
def short_id(x):
 
    return x[:12]
 

	
 
def hide_credentials(x):
 
    return ''.join(credentials_filter(x))
 

	
 

	
 
def show_id(cs):
 
    """
 
    Configurable function that shows the changeset ID;
    by default it's r123:fffeeefffeee
 

	
 
    :param cs: changeset instance
 
    """
 
    from kallithea import CONFIG
 
    def_len = safe_int(CONFIG.get('show_sha_length', 12))
 
    show_rev = str2bool(CONFIG.get('show_revision_number', False))
 

	
 
    raw_id = cs.raw_id[:def_len]
 
    if show_rev:
 
        return 'r%s:%s' % (cs.revision, raw_id)
 
    else:
 
        return raw_id
 

	
 

	
 
def fmt_date(date):
 
    if date:
 
        return date.strftime("%Y-%m-%d %H:%M:%S")
 
    return ""
 

	
 

	
 
def is_git(repository):
 
    if hasattr(repository, 'alias'):
 
        _type = repository.alias
 
    elif hasattr(repository, 'repo_type'):
 
        _type = repository.repo_type
 
    else:
 
        _type = repository
 
    return _type == 'git'
 

	
 

	
 
def is_hg(repository):
 
    if hasattr(repository, 'alias'):
 
        _type = repository.alias
 
    elif hasattr(repository, 'repo_type'):
 
        _type = repository.repo_type
 
    else:
 
        _type = repository
 
    return _type == 'hg'
 

	
 

	
 
@cache_region('long_term', 'user_attr_or_none')
 
def user_attr_or_none(author, show_attr):
 
    """Try to match email part of VCS committer string with a local user and return show_attr
 
    - or return None if user not found"""
 
    email = author_email(author)
 
    if email:
 
        from kallithea.model.db import User
 
        user = User.get_by_email(email, cache=True) # cache will only use sql_cache_short
 
        if user is not None:
 
            return getattr(user, show_attr)
 
    return None
 

	
 

	
 
def email_or_none(author):
 
    """Try to match email part of VCS committer string with a local user.
 
    Return primary email of user, email part of the specified author name, or None."""
 
    if not author:
 
        return None
 
    email = user_attr_or_none(author, 'email')
 
    if email is not None:
 
        return email # always use user's main email address - not necessarily the one used to find user
 

	
 
    # extract email from the commit string
 
    email = author_email(author)
 
    if email:
 
        return email
 

	
 
    # No valid email, not a valid user in the system, none!
 
    return None
 

	
 

	
 
def person(author, show_attr="username"):
 
    """Find the user identified by 'author', return one of the users attributes,
 
    default to the username attribute, None if there is no user"""
 
    from kallithea.model.db import User
 
    # if author is already an instance use it for extraction
 
    if isinstance(author, User):
 
        return getattr(author, show_attr)
 

	
 
    value = user_attr_or_none(author, show_attr)
 
    if value is not None:
 
        return value
 

	
 
    # Still nothing?  Just pass back the author name if any, else the email
 
    return author_name(author) or email(author)
 

	
 

	
 
def person_by_id(id_, show_attr="username"):
 
    from kallithea.model.db import User
 
    # attr to return from fetched user
 
    # (was: person_getter = lambda usr: getattr(usr, show_attr))
 

	
 
    # maybe it's an ID ?
 
    if str(id_).isdigit() or isinstance(id_, int):
 
        id_ = int(id_)
 
        user = User.get(id_)
 
        if user is not None:
 
            # (was: return person_getter(user))
 
            return getattr(user, show_attr)
 
    return id_
 

	
 

	
 
def boolicon(value):
 
    """Returns boolean value of a value, represented as small html image of true/false
 
    icons
 

	
 
    :param value: value
 
    """
 

	
 
    if value:
 
        return HTML.tag('i', class_="icon-ok")
 
    else:
 
        return HTML.tag('i', class_="icon-minus-circled")
 

	
 

	
 
def action_parser(user_log, feed=False, parse_cs=False):
 
    """
 
    This helper maps the specified action string into translated
    fancy names with icons and links
 

	
 
    :param user_log: user log instance
 
    :param feed: use output for feeds (no html and fancy icons)
 
    :param parse_cs: parse Changesets into VCS instances
 
    """
 

	
 
    action = user_log.action
 
    action_params = ' '
 

	
 
    x = action.split(':')
 

	
 
    if len(x) > 1:
 
        action, action_params = x
 

	
 
    def get_cs_links():
 
        revs_limit = 3  # display this amount always
 
        revs_top_limit = 50  # show upto this amount of changesets hidden
 
        revs_ids = action_params.split(',')
 
        deleted = user_log.repository is None
 
        if deleted:
 
            return ','.join(revs_ids)
 

	
 
        repo_name = user_log.repository.repo_name
 

	
 
        def lnk(rev, repo_name):
 
            lazy_cs = False
 
            title_ = None
 
            url_ = '#'
 
            if isinstance(rev, BaseChangeset) or isinstance(rev, AttributeDict):
 
                if rev.op and rev.ref_name:
 
                    if rev.op == 'delete_branch':
 
                        lbl = _('Deleted branch: %s') % rev.ref_name
 
                    elif rev.op == 'tag':
 
                        lbl = _('Created tag: %s') % rev.ref_name
 
                    else:
 
                        lbl = 'Unknown operation %s' % rev.op
 
                else:
 
                    lazy_cs = True
 
                    lbl = rev.short_id[:8]
 
                    url_ = url('changeset_home', repo_name=repo_name,
 
                               revision=rev.raw_id)
 
            else:
 
                # changeset cannot be found - it might have been stripped or removed
 
                lbl = rev[:12]
 
                title_ = _('Changeset %s not found') % lbl
 
            if parse_cs:
 
                return link_to(lbl, url_, title=title_, **{'data-toggle': 'tooltip'})
 
            return link_to(lbl, url_, class_='lazy-cs' if lazy_cs else '',
 
                           **{'data-raw_id': rev.raw_id, 'data-repo_name': repo_name})
 

	
 
        def _get_op(rev_txt):
 
            _op = None
 
            _name = rev_txt
 
            if len(rev_txt.split('=>')) == 2:
 
                _op, _name = rev_txt.split('=>')
 
            return _op, _name
 

	
 
        revs = []
 
        if len([v for v in revs_ids if v != '']) > 0:
 
            repo = None
 
            for rev in revs_ids[:revs_top_limit]:
 
                _op, _name = _get_op(rev)
 

	
 
                # we want parsed changesets, or new log store format is bad
 
                if parse_cs:
 
                    try:
 
                        if repo is None:
 
                            repo = user_log.repository.scm_instance
 
                        _rev = repo.get_changeset(rev)
 
                        revs.append(_rev)
 
                    except ChangesetDoesNotExistError:
 
                        log.error('cannot find revision %s in this repo', rev)
 
                        revs.append(rev)
 
                else:
 
                    _rev = AttributeDict({
 
                        'short_id': rev[:12],
 
                        'raw_id': rev,
 
                        'message': '',
 
                        'op': _op,
 
                        'ref_name': _name
 
                    })
 
                    revs.append(_rev)
 
        cs_links = [" " + ', '.join(
 
            [lnk(rev, repo_name) for rev in revs[:revs_limit]]
 
        )]
 
        _op1, _name1 = _get_op(revs_ids[0])
 
        _op2, _name2 = _get_op(revs_ids[-1])
 

	
 
        _rev = '%s...%s' % (_name1, _name2)
 

	
 
        compare_view = (
 
            ' <div class="compare_view" data-toggle="tooltip" title="%s">'
 
            '<a href="%s">%s</a> </div>' % (
 
                _('Show all combined changesets %s->%s') % (
 
                    revs_ids[0][:12], revs_ids[-1][:12]
 
                ),
 
                url('changeset_home', repo_name=repo_name,
 
                    revision=_rev
 
                ),
 
                _('Compare view')
 
            )
 
        )
 

	
 
        # if we have exactly one more than normally displayed
 
        # just display it, takes less space than displaying
 
        # "and 1 more revisions"
 
        if len(revs_ids) == revs_limit + 1:
 
            cs_links.append(", " + lnk(revs[revs_limit], repo_name))
 

	
 
        # hidden-by-default ones
 
        if len(revs_ids) > revs_limit + 1:
 
            uniq_id = revs_ids[0]
 
            html_tmpl = (
 
                '<span> %s <a class="show_more" id="_%s" '
 
                'href="#more">%s</a> %s</span>'
 
            )
 
            if not feed:
 
                cs_links.append(html_tmpl % (
 
                      _('and'),
 
                      uniq_id, _('%s more') % (len(revs_ids) - revs_limit),
 
                      _('revisions')
 
                    )
 
                )
 

	
 
            if not feed:
 
                html_tmpl = '<span id="%s" style="display:none">, %s </span>'
 
            else:
 
                html_tmpl = '<span id="%s"> %s </span>'
 

	
 
            morelinks = ', '.join(
 
              [lnk(rev, repo_name) for rev in revs[revs_limit:]]
 
            )
 

	
 
            if len(revs_ids) > revs_top_limit:
 
                morelinks += ', ...'
 

	
 
            cs_links.append(html_tmpl % (uniq_id, morelinks))
 
        if len(revs) > 1:
 
            cs_links.append(compare_view)
 
        return ''.join(cs_links)
 

	
 
    def get_fork_name():
 
        repo_name = action_params
 
        url_ = url('summary_home', repo_name=repo_name)
 
        return _('Fork name %s') % link_to(action_params, url_)
 

	
 
    def get_user_name():
 
        user_name = action_params
 
        return user_name
 

	
 
    def get_users_group():
 
        group_name = action_params
 
        return group_name
 

	
 
    def get_pull_request():
 
        from kallithea.model.db import PullRequest
 
        pull_request_id = action_params
 
        nice_id = PullRequest.make_nice_id(pull_request_id)
 

	
 
        deleted = user_log.repository is None
 
        if deleted:
 
            repo_name = user_log.repository_name
 
        else:
 
            repo_name = user_log.repository.repo_name
 

	
 
        return link_to(_('Pull request %s') % nice_id,
 
                    url('pullrequest_show', repo_name=repo_name,
 
                    pull_request_id=pull_request_id))
 

	
 
    def get_archive_name():
 
        archive_name = action_params
 
        return archive_name
 

	
 
    # action : translated str, callback(extractor), icon
 
    action_map = {
 
        'user_deleted_repo':           (_('[deleted] repository'),
 
                                        None, 'icon-trashcan'),
 
        'user_created_repo':           (_('[created] repository'),
 
                                        None, 'icon-plus'),
 
        'user_created_fork':           (_('[created] repository as fork'),
 
                                        None, 'icon-fork'),
 
        'user_forked_repo':            (_('[forked] repository'),
 
                                        get_fork_name, 'icon-fork'),
 
        'user_updated_repo':           (_('[updated] repository'),
 
                                        None, 'icon-pencil'),
 
        'user_downloaded_archive':      (_('[downloaded] archive from repository'),
 
                                        get_archive_name, 'icon-download-cloud'),
 
        'admin_deleted_repo':          (_('[delete] repository'),
 
                                        None, 'icon-trashcan'),
 
        'admin_created_repo':          (_('[created] repository'),
 
                                        None, 'icon-plus'),
 
        'admin_forked_repo':           (_('[forked] repository'),
 
                                        None, 'icon-fork'),
 
        'admin_updated_repo':          (_('[updated] repository'),
 
                                        None, 'icon-pencil'),
 
        'admin_created_user':          (_('[created] user'),
 
                                        get_user_name, 'icon-user'),
 
        'admin_updated_user':          (_('[updated] user'),
 
                                        get_user_name, 'icon-user'),
 
        'admin_created_users_group':   (_('[created] user group'),
 
                                        get_users_group, 'icon-pencil'),
 
        'admin_updated_users_group':   (_('[updated] user group'),
 
                                        get_users_group, 'icon-pencil'),
 
        'user_commented_revision':     (_('[commented] on revision in repository'),
 
                                        get_cs_links, 'icon-comment'),
 
        'user_commented_pull_request': (_('[commented] on pull request for'),
 
                                        get_pull_request, 'icon-comment'),
 
        'user_closed_pull_request':    (_('[closed] pull request for'),
 
                                        get_pull_request, 'icon-ok'),
 
        'push':                        (_('[pushed] into'),
 
                                        get_cs_links, 'icon-move-up'),
 
        'push_local':                  (_('[committed via Kallithea] into repository'),
 
                                        get_cs_links, 'icon-pencil'),
 
        'push_remote':                 (_('[pulled from remote] into repository'),
 
                                        get_cs_links, 'icon-move-up'),
 
        'pull':                        (_('[pulled] from'),
 
                                        None, 'icon-move-down'),
 
        'started_following_repo':      (_('[started following] repository'),
 
                                        None, 'icon-heart'),
 
        'stopped_following_repo':      (_('[stopped following] repository'),
 
                                        None, 'icon-heart-empty'),
 
    }
 

	
 
    action_str = action_map.get(action, action)
 
    if feed:
 
        action = action_str[0].replace('[', '').replace(']', '')
 
    else:
 
        action = action_str[0] \
 
            .replace('[', '<b>') \
 
            .replace(']', '</b>')
 

	
 
    action_params_func = action_str[1] if callable(action_str[1]) else (lambda: "")
 

	
 
    def action_parser_icon():
 
        action = user_log.action
 
        action_params = None
 
        x = action.split(':')
 

	
 
        if len(x) > 1:
 
            action, action_params = x
 

	
 
        ico = action_map.get(action, ['', '', ''])[2]
 
        html = """<i class="%s"></i>""" % ico
 
        return literal(html)
 

	
 
    # returned callbacks we need to call to get
 
    return [lambda: literal(action), action_params_func, action_parser_icon]
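
# A minimal usage sketch, assuming 'entry' is a user log instance as passed in
# from the journal views; each returned element is a callback:
#
#   >>> action, params, icon = action_parser(entry)          # doctest: +SKIP
#   >>> html = action() + params() + icon()                  # doctest: +SKIP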
 

	
 

	
 
#==============================================================================
 
# GRAVATAR URL
 
#==============================================================================
 
def gravatar_div(email_address, cls='', size=30, **div_attributes):
 
    """Return an html literal with a span around a gravatar if they are enabled.
 
    Extra keyword parameters starting with 'div_' will get the prefix removed
 
    and '_' changed to '-' and be used as attributes on the div. The default
 
    class is 'gravatar'.
 
    """
 
    from tg import tmpl_context as c
 
    if not c.visual.use_gravatar:
 
        return ''
 
    if 'div_class' not in div_attributes:
 
        div_attributes['div_class'] = "gravatar"
 
    attributes = []
 
    for k, v in sorted(div_attributes.items()):
 
        assert k.startswith('div_'), k
 
        attributes.append(' %s="%s"' % (k[4:].replace('_', '-'), escape(v)))
 
    return literal("""<span%s>%s</span>""" %
 
                   (''.join(attributes),
 
                    gravatar(email_address, cls=cls, size=size)))
 

	
 

	
 
def gravatar(email_address, cls='', size=30):
 
    """return html element of the gravatar
 

	
 
    This method will return an <img> with the resolution double the size (for
 
    retina screens) of the image. If the url returned from gravatar_url is
 
    empty then we fall back to using an icon.
 

	
 
    """
 
    from tg import tmpl_context as c
 
    if not c.visual.use_gravatar:
 
        return ''
 

	
 
    src = gravatar_url(email_address, size * 2)
 

	
 
    if src:
 
        # here it makes sense to use style="width: ..." (instead of, say, a
 
        # stylesheet) because we are using this to generate a high-res (retina) size
 
        html = ('<i class="icon-gravatar {cls}"'
 
                ' style="font-size: {size}px;background-size: {size}px;background-image: url(\'{src}\')"'
 
                '></i>').format(cls=cls, size=size, src=src)
 

	
 
    else:
 
        # if src is empty then there was no gravatar, so we use a font icon
 
        html = ("""<i class="icon-user {cls}" style="font-size: {size}px;"></i>"""
 
            .format(cls=cls, size=size, src=src))
 

	
 
    return literal(html)
 

	
 

	
 
def gravatar_url(email_address, size=30, default=''):
 
    # doh, we need to re-import those to mock it later
 
    from kallithea.config.routing import url
 
    from kallithea.model.db import User
 
    from tg import tmpl_context as c
 
    if not c.visual.use_gravatar:
 
        return ""
 

	
 
    _def = 'anonymous@kallithea-scm.org'  # default gravatar
 
    email_address = email_address or _def
 

	
 
    if email_address == _def:
 
        return default
 

	
 
    parsed_url = urllib.parse.urlparse(url.current(qualified=True))
 
    url = (c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL) \
 
               .replace('{email}', email_address) \
 
               .replace('{md5email}', hashlib.md5(safe_bytes(email_address).lower()).hexdigest()) \
 
               .replace('{netloc}', parsed_url.netloc) \
 
               .replace('{scheme}', parsed_url.scheme) \
 
               .replace('{size}', str(size))
 
    return url
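
# A minimal usage sketch, assuming gravatars are enabled and the default URL
# template; '{md5email}', '{scheme}', '{netloc}' and '{size}' are substituted:
#
#   >>> gravatar_url('joe@example.com', size=30)     # doctest: +SKIP
#   '...avatar/<md5 of the lowercased email>...30...'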
 

	
 

	
 
def changed_tooltip(nodes):
 
    """
 
    Generates a html string for changed nodes in changeset page.
 
    It limits the output to 30 entries
 

	
 
    :param nodes: LazyNodesGenerator
 
    """
 
    if nodes:
 
        pref = ': <br/> '
 
        suf = ''
 
        if len(nodes) > 30:
 
            suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
 
        return literal(pref + '<br/> '.join([x.path
 
                                             for x in nodes[:30]]) + suf)
 
    else:
 
        return ': ' + _('No files')
 

	
 

	
 
def fancy_file_stats(stats):
 
    """
 
    Displays a fancy two colored bar for number of added/deleted
 
    lines of code on file
 

	
 
    :param stats: two element list of added/deleted lines of code
 
    """
 
    from kallithea.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
 
        MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE
 

	
 
    a, d = stats['added'], stats['deleted']
 
    width = 100
 

	
 
    if stats['binary']:
 
        # binary mode
 
        lbl = ''
 
        bin_op = 1
 

	
 
        if BIN_FILENODE in stats['ops']:
 
            lbl = 'bin+'
 

	
 
        if NEW_FILENODE in stats['ops']:
 
            lbl += _('new file')
 
            bin_op = NEW_FILENODE
 
        elif MOD_FILENODE in stats['ops']:
 
            lbl += _('mod')
 
            bin_op = MOD_FILENODE
 
        elif DEL_FILENODE in stats['ops']:
 
            lbl += _('del')
 
            bin_op = DEL_FILENODE
 
        elif RENAMED_FILENODE in stats['ops']:
 
            lbl += _('rename')
 
            bin_op = RENAMED_FILENODE
 

	
 
        # chmod can go with other operations
 
        if CHMOD_FILENODE in stats['ops']:
 
            _org_lbl = _('chmod')
 
            lbl += _org_lbl if lbl.endswith('+') else '+%s' % _org_lbl
 

	
 
        #import ipdb;ipdb.set_trace()
 
        b_d = '<div class="bin bin%s progress-bar" style="width:100%%">%s</div>' % (bin_op, lbl)
 
        b_a = '<div class="bin bin1" style="width:0%"></div>'
 
        return literal('<div style="width:%spx" class="progress">%s%s</div>' % (width, b_a, b_d))
 

	
 
    t = stats['added'] + stats['deleted']
 
    unit = float(width) / (t or 1)
 

	
 
    # needs > 9% of width to be visible or 0 to be hidden
 
    a_p = max(9, unit * a) if a > 0 else 0
 
    d_p = max(9, unit * d) if d > 0 else 0
 
    p_sum = a_p + d_p
 

	
 
    if p_sum > width:
 
        # adjust the percentage to be == 100% since we adjusted to 9
 
        if a_p > d_p:
 
            a_p = a_p - (p_sum - width)
 
        else:
 
            d_p = d_p - (p_sum - width)
 

	
 
    a_v = a if a > 0 else ''
 
    d_v = d if d > 0 else ''
 

	
 
    d_a = '<div class="added progress-bar" style="width:%s%%">%s</div>' % (
 
        a_p, a_v
 
    )
 
    d_d = '<div class="deleted progress-bar" style="width:%s%%">%s</div>' % (
 
        d_p, d_v
 
    )
 
    return literal('<div class="progress" style="width:%spx">%s%s</div>' % (width, d_a, d_d))
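
# A rough worked example of the width math above, assuming added=50, deleted=1
# and width=100: unit is 100/51, so a_p is ~98 and d_p is clamped up to 9;
# p_sum then exceeds 100, so the larger bar is shrunk by the overflow and the
# two bars still fill exactly 100px.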
 

	
 

	
 
_URLIFY_RE = re.compile(r'''
 
# URL markup
 
(?P<url>%s) |
 
# @mention markup
 
(?P<mention>%s) |
 
# Changeset hash markup
 
(?<!\w|[-_])
 
  (?P<hash>[0-9a-f]{12,40})
 
(?!\w|[-_]) |
 
# Markup of *bold text*
 
(?:
 
  (?:^|(?<=\s))
 
  (?P<bold> [*] (?!\s) [^*\n]* (?<!\s) [*] )
 
  (?![*\w])
 
) |
 
# "Stylize" markup
 
\[see\ \=&gt;\ *(?P<seen>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] |
 
\[license\ \=&gt;\ *(?P<license>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] |
 
\[(?P<tagtype>requires|recommends|conflicts|base)\ \=&gt;\ *(?P<tagvalue>[a-zA-Z0-9\-\/]*)\] |
 
\[(?:lang|language)\ \=&gt;\ *(?P<lang>[a-zA-Z\-\/\#\+]*)\] |
 
\[(?P<tag>[a-z]+)\]
 
''' % (url_re.pattern, MENTIONS_REGEX.pattern),
 
    re.VERBOSE | re.MULTILINE | re.IGNORECASE)
 

	
 

	
 
def urlify_text(s, repo_name=None, link_=None, truncate=None, stylize=False, truncatef=truncate):
 
    """
 
    Parse the given text message and make literal html with markup.
 
    The text will be truncated to the specified length.
 
    Hashes are turned into changeset links to the specified repository.
 
    URLs link to what they say.
 
    Issues are linked to the given issue server.
 
    If link_ is provided, all text not already linking somewhere will link there.
 
    >>> urlify_text("Urlify http://example.com/ and 'https://example.com' *and* <b>markup/b>")
 
    literal('Urlify <a href="http://example.com/">http://example.com/</a> and &#39;<a href="https://example.com&apos">https://example.com&apos</a>; <b>*and*</b> &lt;b&gt;markup/b&gt;')
 
    """
 

	
 
    def _replace(match_obj):
 
        url = match_obj.group('url')
 
        if url is not None:
 
            return '<a href="%(url)s">%(url)s</a>' % {'url': url}
 
        mention = match_obj.group('mention')
 
        if mention is not None:
 
            return '<b>%s</b>' % mention
 
        hash_ = match_obj.group('hash')
 
        if hash_ is not None and repo_name is not None:
 
            from kallithea.config.routing import url  # doh, we need to re-import url to mock it later
 
            return '<a class="changeset_hash" href="%(url)s">%(hash)s</a>' % {
 
                 'url': url('changeset_home', repo_name=repo_name, revision=hash_),
 
                 'hash': hash_,
 
                }
 
        bold = match_obj.group('bold')
 
        if bold is not None:
 
            return '<b>*%s*</b>' % _urlify(bold[1:-1])
 
        if stylize:
 
            seen = match_obj.group('seen')
 
            if seen:
 
                return '<div class="label label-meta" data-tag="see">see =&gt; %s</div>' % seen
 
            license = match_obj.group('license')
 
            if license:
 
                return '<div class="label label-meta" data-tag="license"><a href="http://www.opensource.org/licenses/%s">%s</a></div>' % (license, license)
 
            tagtype = match_obj.group('tagtype')
 
            if tagtype:
 
                tagvalue = match_obj.group('tagvalue')
 
                return '<div class="label label-meta" data-tag="%s">%s =&gt; <a href="/%s">%s</a></div>' % (tagtype, tagtype, tagvalue, tagvalue)
 
            lang = match_obj.group('lang')
 
            if lang:
 
                return '<div class="label label-meta" data-tag="lang">%s</div>' % lang
 
            tag = match_obj.group('tag')
 
            if tag:
 
                return '<div class="label label-meta" data-tag="%s">%s</div>' % (tag, tag)
 
        return match_obj.group(0)
 

	
 
    def _urlify(s):
 
        """
 
        Extract urls from text and make html links out of them
 
        """
 
        return _URLIFY_RE.sub(_replace, s)
 

	
 
    if truncate is None:
 
        s = s.rstrip()
 
    else:
 
        s = truncatef(s, truncate, whole_word=True)
 
    s = html_escape(s)
 
    s = _urlify(s)
 
    if repo_name is not None:
 
        s = urlify_issues(s, repo_name)
 
    if link_ is not None:
 
        # make href around everything that isn't a href already
 
        s = linkify_others(s, link_)
 
    s = s.replace('\r\n', '<br/>').replace('\n', '<br/>')
 
    # Turn HTML5 into more valid HTML4 as required by some mail readers.
 
    # (This is not done in one step in html_escape, because character codes like
 
    # &#123; risk to be seen as an issue reference due to the presence of '#'.)
 
    s = s.replace("&apos;", "&#39;")
 
    return literal(s)
 

	
 

	
 
def linkify_others(t, l):
 
    """Add a default link to html with links.
 
    HTML doesn't allow nesting of links, so the outer link must be broken up
 
    into pieces to leave room for the other links.
 
    """
 
    urls = re.compile(r'(\<a.*?\<\/a\>)',)
 
    links = []
 
    for e in urls.split(t):
 
        if e.strip() and not urls.match(e):
 
            links.append('<a class="message-link" href="%s">%s</a>' % (l, e))
 
        else:
 
            links.append(e)
 

	
 
    return ''.join(links)
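
# A minimal sketch of the splitting above: existing links are kept as-is and
# the remaining text fragments are wrapped in the default link:
#
#   >>> linkify_others('see <a href="/x">x</a> here', '/journal')  # doctest: +SKIP
#   '<a class="message-link" href="/journal">see </a><a href="/x">x</a><a class="message-link" href="/journal"> here</a>'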
 

	
 

	
 
# Global variable that will hold the actual urlify_issues function body.
 
# Will be set on first use when the global configuration has been read.
 
_urlify_issues_f = None
 

	
 

	
 
def urlify_issues(newtext, repo_name):
 
    """Urlify issue references according to .ini configuration"""
 
    global _urlify_issues_f
 
    if _urlify_issues_f is None:
 
        from kallithea import CONFIG
 
        from kallithea.model.db import URL_SEP
 
        assert CONFIG['sqlalchemy.url'] # make sure config has been loaded
 

	
 
        # Build chain of urlify functions, starting with not doing any transformation
 
        def tmp_urlify_issues_f(s):
 
            return s
 

	
 
        issue_pat_re = re.compile(r'issue_pat(.*)')
 
        for k in CONFIG:
 
            # Find all issue_pat* settings that also have corresponding server_link and prefix configuration
 
            m = issue_pat_re.match(k)
 
            if m is None:
 
                continue
 
            suffix = m.group(1)
 
            issue_pat = CONFIG.get(k)
 
            issue_server_link = CONFIG.get('issue_server_link%s' % suffix)
 
            issue_sub = CONFIG.get('issue_sub%s' % suffix)
 
            if not issue_pat or not issue_server_link or issue_sub is None: # issue_sub can be empty but should be present
 
                log.error('skipping incomplete issue pattern %r: %r -> %r %r', suffix, issue_pat, issue_server_link, issue_sub)
 
                continue
 

	
 
            # Wrap tmp_urlify_issues_f with substitution of this pattern, while making sure all loop variables (and compiled regexpes) are bound
 
            try:
 
                issue_re = re.compile(issue_pat)
 
            except re.error as e:
 
                log.error('skipping invalid issue pattern %r: %r -> %r %r. Error: %s', suffix, issue_pat, issue_server_link, issue_sub, str(e))
 
                continue
 

	
 
            log.debug('issue pattern %r: %r -> %r %r', suffix, issue_pat, issue_server_link, issue_sub)
 

	
 
            def issues_replace(match_obj,
 
                               issue_server_link=issue_server_link, issue_sub=issue_sub):
 
                try:
 
                    issue_url = match_obj.expand(issue_server_link)
 
                except (IndexError, re.error) as e:
 
                    log.error('invalid issue_url setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e))
 
                    issue_url = issue_server_link
 
                issue_url = issue_url.replace('{repo}', repo_name)
 
                issue_url = issue_url.replace('{repo_name}', repo_name.split(URL_SEP)[-1])
 
                # if issue_sub is empty use the matched issue reference verbatim
 
                if not issue_sub:
 
                    issue_text = match_obj.group()
 
                else:
 
                    try:
 
                        issue_text = match_obj.expand(issue_sub)
 
                    except (IndexError, re.error) as e:
 
                        log.error('invalid issue_sub setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e))
 
                        issue_text = match_obj.group()
 

	
 
                return (
 
                    '<a class="issue-tracker-link" href="%(url)s">'
 
                    '%(text)s'
 
                    '</a>'
 
                    ) % {
 
                     'url': issue_url,
 
                     'text': issue_text,
 
                    }
 

	
 
            def tmp_urlify_issues_f(s, issue_re=issue_re, issues_replace=issues_replace, chain_f=tmp_urlify_issues_f):
 
                return issue_re.sub(issues_replace, chain_f(s))
 

	
 
        # Set tmp function globally - atomically
 
        _urlify_issues_f = tmp_urlify_issues_f
 

	
 
    return _urlify_issues_f(newtext)
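
# A minimal configuration sketch (values are made up): with .ini settings like
# these, '#123' in a message is turned into a link to the issue tracker of the
# repository the message belongs to:
#
#   issue_pat = #(\d+)
#   issue_server_link = https://issues.example.com/{repo}/issue/\1
#   issue_sub =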
 

	
 

	
 
def render_w_mentions(source, repo_name=None):
 
    """
 
    Render plain text with revision hashes and issue references urlified
 
    and with @mention highlighting.
 
    """
 
    s = safe_str(source)
 
    s = urlify_text(s, repo_name=repo_name)
 
    return literal('<div class="formatted-fixed">%s</div>' % s)
 

	
 

	
 
def short_ref(ref_type, ref_name):
 
    if ref_type == 'rev':
 
        return short_id(ref_name)
 
    return ref_name
 

	
 

	
 
def link_to_ref(repo_name, ref_type, ref_name, rev=None):
 
    """
 
    Return full markup for a href to changeset_home for a changeset.
 
    If ref_type is branch it will link to changelog.
 
    ref_name is shortened if ref_type is 'rev'.
 
    if rev is specified show it too, explicitly linking to that revision.
 
    """
 
    txt = short_ref(ref_type, ref_name)
 
    if ref_type == 'branch':
 
        u = url('changelog_home', repo_name=repo_name, branch=ref_name)
 
    else:
 
        u = url('changeset_home', repo_name=repo_name, revision=ref_name)
 
    l = link_to(repo_name + '#' + txt, u)
 
    if rev and ref_type != 'rev':
 
        l = literal('%s (%s)' % (l, link_to(short_id(rev), url('changeset_home', repo_name=repo_name, revision=rev))))
 
    return l
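
# A minimal usage sketch, assuming a repository named 'myrepo' (the exact URLs
# depend on the routing configuration):
#
#   >>> link_to_ref('myrepo', 'branch', 'default')   # doctest: +SKIP
#   literal('<a href="...">myrepo#default</a>')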
 

	
 

	
 
def changeset_status(repo, revision):
 
    from kallithea.model.changeset_status import ChangesetStatusModel
 
    return ChangesetStatusModel().get_status(repo, revision)
 

	
 

	
 
def changeset_status_lbl(changeset_status):
 
    from kallithea.model.db import ChangesetStatus
 
    return ChangesetStatus.get_status_lbl(changeset_status)
 

	
 

	
 
def get_permission_name(key):
 
    from kallithea.model.db import Permission
 
    return dict(Permission.PERMS).get(key)
 

	
 

	
 
def journal_filter_help():
 
    return _(textwrap.dedent('''
 
        Example filter terms:
 
            repository:vcs
 
            username:developer
 
            action:*push*
 
            ip:127.0.0.1
 
            date:20120101
 
            date:[20120101100000 TO 20120102]
 

	
 
        Generate wildcards using '*' character:
 
            "repository:vcs*" - search everything starting with 'vcs'
 
            "repository:*vcs*" - search for repository containing 'vcs'
 

	
 
        Optional AND / OR operators in queries
 
            "repository:vcs OR repository:test"
 
            "username:test AND repository:test*"
 
    '''))
 

	
 

	
 
def not_mapped_error(repo_name):
 
    flash(_('%s repository is not mapped to db perhaps'
 
            ' it was created or renamed from the filesystem'
 
            ' please run the application again'
 
            ' in order to rescan repositories') % repo_name, category='error')
 

	
 

	
 
def ip_range(ip_addr):
 
    from kallithea.model.db import UserIpMap
 
    s, e = UserIpMap._get_ip_range(ip_addr)
 
    return '%s - %s' % (s, e)
 

	
 

	
 
session_csrf_secret_name = "_session_csrf_secret_token"
 

	
 
def session_csrf_secret_token():
 
    """Return (and create) the current session's CSRF protection token."""
 
    from tg import session
 
    if not session_csrf_secret_name in session:
 
        session[session_csrf_secret_name] = str(random.getrandbits(128))
 
        session.save()
 
    return session[session_csrf_secret_name]
 

	
 
def form(url, method="post", **attrs):
 
    """Like webhelpers.html.tags.form , but automatically adding
 
    session_csrf_secret_token for POST. The secret is thus never leaked in GET
 
    URLs.
 
    """
 
    form = insecure_form(url, method, **attrs)
 
    if method.lower() == 'get':
 
        return form
 
    return form + HTML.div(hidden(session_csrf_secret_name, session_csrf_secret_token()), style="display: none;")
kallithea/lib/rcmail/response.py
Show inline comments
 
# The code in this module is entirely lifted from the Lamson project
 
# (http://lamsonproject.org/).  Its copyright is:
 

	
 
# Copyright (c) 2008, Zed A. Shaw
 
# All rights reserved.
 

	
 
# It is provided under this license:
 

	
 
# Redistribution and use in source and binary forms, with or without
 
# modification, are permitted provided that the following conditions are met:
 

	
 
# * Redistributions of source code must retain the above copyright notice, this
 
#   list of conditions and the following disclaimer.
 

	
 
# * Redistributions in binary form must reproduce the above copyright notice,
 
#   this list of conditions and the following disclaimer in the documentation
 
#   and/or other materials provided with the distribution.
 

	
 
# * Neither the name of the Zed A. Shaw nor the names of its contributors may
 
#   be used to endorse or promote products derived from this software without
 
#   specific prior written permission.
 

	
 
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
 
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 
# POSSIBILITY OF SUCH DAMAGE.
 

	
 
import mimetypes
 
import os
 
import string
 
from email import encoders
 
from email.charset import Charset
 
from email.mime.base import MIMEBase
 
from email.utils import parseaddr
 

	
 

	
 
ADDRESS_HEADERS_WHITELIST = ['From', 'To', 'Delivered-To', 'Cc']
 
DEFAULT_ENCODING = "utf-8"
 

	
 
def VALUE_IS_EMAIL_ADDRESS(v):
 
    return '@' in v
 

	
 

	
 
def normalize_header(header):
 
    return string.capwords(header.lower(), '-')
 

	
 

	
 
class EncodingError(Exception):
 
    """Thrown when there is an encoding error."""
 
    pass
 

	
 

	
 
class MailBase(object):
 
    """MailBase is used as the basis of lamson.mail and contains the basics of
 
    encoding an email.  You actually can do all your email processing with this
 
    class, but it's more raw.
 
    """
 
    def __init__(self, items=()):
 
        self.headers = dict(items)
 
        self.parts = []
 
        self.body = None
 
        self.content_encoding = {'Content-Type': (None, {}),
 
                                 'Content-Disposition': (None, {}),
 
                                 'Content-Transfer-Encoding': (None, {})}
 

	
 
    def __getitem__(self, key):
 
        return self.headers.get(normalize_header(key), None)
 

	
 
    def __len__(self):
 
        return len(self.headers)
 

	
 
    def __iter__(self):
 
        return iter(self.headers)
 

	
 
    def __contains__(self, key):
 
        return normalize_header(key) in self.headers
 

	
 
    def __setitem__(self, key, value):
 
        self.headers[normalize_header(key)] = value
 

	
 
    def __delitem__(self, key):
 
        del self.headers[normalize_header(key)]
 

	
 
    def __bool__(self):
 
        return self.body is not None or len(self.headers) > 0 or len(self.parts) > 0
 

	
 
    def keys(self):
 
        """Returns the sorted keys."""
 
        return sorted(self.headers.keys())
 

	
 
    def attach_file(self, filename, data, ctype, disposition):
 
        """
 
        A file attachment is a raw attachment with a disposition that
 
        indicates the file name.
 
        """
 
        assert filename, "You can't attach a file without a filename."
 
        ctype = ctype.lower()
 

	
 
        part = MailBase()
 
        part.body = data
 
        part.content_encoding['Content-Type'] = (ctype, {'name': filename})
 
        part.content_encoding['Content-Disposition'] = (disposition,
 
                                                        {'filename': filename})
 
        self.parts.append(part)
 

	
 
    def attach_text(self, data, ctype):
 
        """
 
        This attaches a simpler text encoded part, which doesn't have a
 
        filename.
 
        """
 
        ctype = ctype.lower()
 

	
 
        part = MailBase()
 
        part.body = data
 
        part.content_encoding['Content-Type'] = (ctype, {})
 
        self.parts.append(part)
 

	
 
    def walk(self):
 
        for p in self.parts:
 
            yield p
 
            for x in p.walk():
 
                yield x
 

	
 

	
 
class MailResponse(object):
 
    """
 
    You are given MailResponse objects from the lamson.view methods, and
 
    whenever you want to generate an email to send to someone.  It has the
 
    same basic functionality as MailRequest, but it is designed to be written
 
    to, rather than read from (although you can do both).
 

	
 
    You can easily set a Body or Html during creation or after by passing it
 
    as __init__ parameters, or by setting those attributes.
 

	
 
    You can initially set the From, To, and Subject, but they are headers so
 
    use the dict notation to change them: msg['From'] = 'joe@example.com'.
 

	
 
    The message is not fully crafted until right when you convert it with
 
    MailResponse.to_message.  This lets you change it and work with it, then
 
    send it out when it's ready.
 
    """
 
    def __init__(self, To=None, From=None, Subject=None, Body=None, Html=None,
 
                 separator="; "):
 
        self.Body = Body
 
        self.Html = Html
 
        self.base = MailBase([('To', To), ('From', From), ('Subject', Subject)])
 
        self.multipart = self.Body and self.Html
 
        self.attachments = []
 
        self.separator = separator
 

	
 
    def __contains__(self, key):
 
        return self.base.__contains__(key)
 

	
 
    def __getitem__(self, key):
 
        return self.base.__getitem__(key)
 

	
 
    def __setitem__(self, key, val):
 
        return self.base.__setitem__(key, val)
 

	
 
    def __delitem__(self, name):
 
        del self.base[name]
 

	
 
    def attach(self, filename=None, content_type=None, data=None,
 
               disposition=None):
 
        """
 

	
 
        Simplifies attaching files from disk or data as files.  To attach
 
        simple text, simply give data and a content_type.  To attach a file,
 
        give the data/content_type/filename/disposition combination.
 

	
 
        For convenience, if you don't give data and only a filename, then it
 
        will read that file's contents when you call to_message() later.  If
 
        you give data and filename then it will assume you've filled data
 
        with what the file's contents are and filename is just the name to
 
        use.
 
        """
 

	
 
        assert filename or data, ("You must give a filename or some data to "
 
                                  "attach.")
 
        assert data or os.path.exists(filename), ("File doesn't exist, and no "
 
                                                  "data given.")
 

	
 
        self.multipart = True
 

	
 
        if filename and not content_type:
 
            content_type, encoding = mimetypes.guess_type(filename)
 

	
 
        assert content_type, ("No content type given, and couldn't guess "
 
                              "from the filename: %r" % filename)
 

	
 
        self.attachments.append({'filename': filename,
 
                                 'content_type': content_type,
 
                                 'data': data,
 
                                 'disposition': disposition})
 

	
 
    def attach_part(self, part):
 
        """
 
        Attaches a raw MailBase part from a MailRequest (or anywhere)
 
        so that you can copy it over.
 
        """
 
        self.multipart = True
 

	
 
        self.attachments.append({'filename': None,
 
                                 'content_type': None,
 
                                 'data': None,
 
                                 'disposition': None,
 
                                 'part': part,
 
                                 })
 

	
 
    def attach_all_parts(self, mail_request):
 
        """
 
        Used for copying the attachment parts of a mail.MailRequest
 
        object for mailing lists that need to maintain attachments.
 
        """
 
        for part in mail_request.all_parts():
 
            self.attach_part(part)
 

	
 
        self.base.content_encoding = mail_request.base.content_encoding.copy()
 

	
 
    def clear(self):
 
        """
 
        Clears out the attachments so you can redo them.  Use this to keep the
 
        headers for a series of different messages with different attachments.
 
        """
 
        del self.attachments[:]
 
        del self.base.parts[:]
 
        self.multipart = False
 

	
 
    def update(self, message):
 
        """
 
        Used to easily set a bunch of headers from another dict-
 
        like object.
 
        """
 
        for k in message.keys():
 
            self.base[k] = message[k]
 

	
 
    def __str__(self):
 
        """
 
        Converts to a string.
 
        """
 
        return self.to_message().as_string()
 

	
 
    def _encode_attachment(self, filename=None, content_type=None, data=None,
 
                           disposition=None, part=None):
 
        """
 
        Used internally to take the attachments mentioned in self.attachments
 
        and do the actual encoding in a lazy way when you call to_message.
 
        """
 
        if part:
 
            self.base.parts.append(part)
 
        elif filename:
 
            if not data:
 
                data = open(filename).read()
 

	
 
            self.base.attach_file(filename, data, content_type,
 
                                  disposition or 'attachment')
 
        else:
 
            self.base.attach_text(data, content_type)
 

	
 
        ctype = self.base.content_encoding['Content-Type'][0]
 

	
 
        if ctype and not ctype.startswith('multipart'):
 
            self.base.content_encoding['Content-Type'] = ('multipart/mixed', {})
 

	
 
    def to_message(self):
 
        """
 
        Figures out all the required steps to finally craft the
 
        message you need and return it.  The resulting message
 
        is also available as a self.base attribute.
 

	
 
        What is returned is a Python email API message you can
 
        use with those APIs.  The self.base attribute is the raw
 
        lamson.encoding.MailBase.
 
        """
 
        del self.base.parts[:]
 

	
 
        if self.Body and self.Html:
 
            self.multipart = True
 
            self.base.content_encoding['Content-Type'] = (
 
                'multipart/alternative', {})
 

	
 
        if self.multipart:
 
            self.base.body = None
 
            if self.Body:
 
                self.base.attach_text(self.Body, 'text/plain')
 

	
 
            if self.Html:
 
                self.base.attach_text(self.Html, 'text/html')
 

	
 
            for args in self.attachments:
 
                self._encode_attachment(**args)
 

	
 
        elif self.Body:
 
            self.base.body = self.Body
 
            self.base.content_encoding['Content-Type'] = ('text/plain', {})
 

	
 
        elif self.Html:
 
            self.base.body = self.Html
 
            self.base.content_encoding['Content-Type'] = ('text/html', {})
 

	
 
        return to_message(self.base, separator=self.separator)
 

	
 
    def all_parts(self):
 
        """
 
        Returns all the encoded parts.  Only useful for debugging
 
        or inspecting after calling to_message().
 
        """
 
        return self.base.parts
 

	
 
    def keys(self):
 
        return self.base.keys()
 

	
 

	
 
def to_message(mail, separator="; "):
 
    """
 
    Given a MailBase message, this will construct a MIMEPart
 
    that is canonicalized for use with the Python email API.
 
    """
 
    ctype, params = mail.content_encoding['Content-Type']
 

	
 
    if not ctype:
 
        if mail.parts:
 
            ctype = 'multipart/mixed'
 
        else:
 
            ctype = 'text/plain'
 
    else:
 
        if mail.parts:
 
            assert ctype.startswith(("multipart", "message")), \
 
                   "Content type should be multipart or message, not %r" % ctype
 

	
 
    # adjust the content type according to what it should be now
 
    mail.content_encoding['Content-Type'] = (ctype, params)
 

	
 
    try:
 
        out = MIMEPart(ctype, **params)
 
    except TypeError as e:  # pragma: no cover
 
        raise EncodingError("Content-Type malformed, not allowed: %r; "
 
                            "%r (Python ERROR: %s)" %
 
                            (ctype, params, e.args[0]))
 

	
 
    for k in mail.keys():
 
        if k in ADDRESS_HEADERS_WHITELIST:
 
            out[k] = header_to_mime_encoding(
 
                                         mail[k],
 
                                         not_email=False,
 
                                         separator=separator
 
                                     )
 
        else:
 
            out[k] = header_to_mime_encoding(
 
                                         mail[k],
 
                                         not_email=True
 
                                    )
 

	
 
    out.extract_payload(mail)
 

	
 
    # go through the children
 
    for part in mail.parts:
 
        out.attach(to_message(part))
 

	
 
    return out
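
# A minimal usage sketch, assuming a MailBase built by hand:
#
#   >>> mail = MailBase([('To', 'joe@example.com'), ('Subject', 'Hi')])
#   >>> mail.body = 'Hello'
#   >>> mail.content_encoding['Content-Type'] = ('text/plain', {})
#   >>> to_message(mail).as_string()                 # doctest: +SKIP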
 

	
 

	
 
class MIMEPart(MIMEBase):
 
    """
 
    A reimplementation of nearly everything in email.mime to be more useful
 
    for actually attaching things.  Rather than one class for every type of
 
    thing you'd encode, there's just this one, and it figures out how to
 
    encode what you ask it.
 
    """
 
    def __init__(self, type, **params):
 
        self.maintype, self.subtype = type.split('/')
 
        MIMEBase.__init__(self, self.maintype, self.subtype, **params)
 

	
 
    def add_text(self, content):
 
        # this is text, so encode it in canonical form
 
        try:
 
            encoded = content.encode('ascii')
 
            charset = 'ascii'
 
        except UnicodeError:
 
            encoded = content.encode('utf-8')
 
            charset = 'utf-8'
 

	
 
        self.set_payload(encoded, charset=charset)
 

	
 
    def extract_payload(self, mail):
 
        if mail.body is None:
 
            return  # only None, '' is still ok
 

	
 
        ctype, _ctype_params = mail.content_encoding['Content-Type']
 
        cdisp, cdisp_params = mail.content_encoding['Content-Disposition']
 

	
 
        assert ctype, ("Extract payload requires that mail.content_encoding "
 
                       "have a valid Content-Type.")
 

	
 
        if ctype.startswith("text/"):
 
            self.add_text(mail.body)
 
        else:
 
            if cdisp:
 
                # replicate the content-disposition settings
 
                self.add_header('Content-Disposition', cdisp, **cdisp_params)
 

	
 
            self.set_payload(mail.body)
 
            encoders.encode_base64(self)
 

	
 
    def __repr__(self):
 
        return "<MIMEPart '%s/%s': %r, %r, multipart=%r>" % (
 
            self.subtype,
 
            self.maintype,
 
            self['Content-Type'],
 
            self['Content-Disposition'],
 
            self.is_multipart())
 

	
 

	
 
def header_to_mime_encoding(value, not_email=False, separator=", "):
 
    if not value:
 
        return ""
 

	
 
    encoder = Charset(DEFAULT_ENCODING)
 
    if isinstance(value, list):
 
        return separator.join(properly_encode_header(
 
            v, encoder, not_email) for v in value)
 
    else:
 
        return properly_encode_header(value, encoder, not_email)
 

	
 

	
 
def properly_encode_header(value, encoder, not_email):
 
    """
 
    The only thing special (weird) about this function is that it tries
 
    to do a fast check to see if the header value has an email address in
 
    it.  Since random headers could have an email address, and email addresses
 
    have weird special formatting rules, we have to check for it.
 

	
 
    Normally this works fine, but in Librelist, we need to "obfuscate" email
 
    addresses by changing the '@' to '-AT-'.  This is where
 
    VALUE_IS_EMAIL_ADDRESS exists.  It's a simple lambda returning True/False
 
    to check if a header value has an email address.  If you need to make this
 
    check different, then change this.
 
    """
 
    try:
 
        value.encode("ascii")
 
        return value
 
    except UnicodeError:
 
        if not not_email and VALUE_IS_EMAIL_ADDRESS(value):
 
            # this could have an email address, make sure we don't screw it up
 
            name, address = parseaddr(value)
 
            return '"%s" <%s>' % (encoder.header_encode(name), address)
 

	
 
        return encoder.header_encode(value)
kallithea/lib/vcs/backends/git/changeset.py
Show inline comments
 
import re
 
from io import BytesIO
 
from itertools import chain
 
from subprocess import PIPE, Popen
 

	
 
from dulwich import objects
 
from dulwich.config import ConfigFile
 

	
 
from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
 
from kallithea.lib.vcs.conf import settings
 
from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, ImproperArchiveTypeError, NodeDoesNotExistError, RepositoryError, VCSError
 
from kallithea.lib.vcs.nodes import (
 
    AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode, NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode)
 
from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, date_fromtimestamp, safe_int, safe_str
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 

	
 

	
 
class GitChangeset(BaseChangeset):
 
    """
 
    Represents state of the repository at a revision.
 
    """
 

	
 
    def __init__(self, repository, revision):
 
        self._stat_modes = {}
 
        self.repository = repository
 
        try:
 
            commit = self.repository._repo[ascii_bytes(revision)]
 
            if isinstance(commit, objects.Tag):
 
                revision = safe_str(commit.object[1])
 
                commit = self.repository._repo.get_object(commit.object[1])
 
        except KeyError:
 
            raise RepositoryError("Cannot get object with id %s" % revision)
 
        self.raw_id = ascii_str(commit.id)
 
        self.short_id = self.raw_id[:12]
 
        self._commit = commit  # a Dulwich Commit with .id
 
        self._tree_id = commit.tree
 
        self._committer_property = 'committer'
 
        self._author_property = 'author'
 
        self._date_property = 'commit_time'
 
        self._date_tz_property = 'commit_timezone'
 
        self.revision = repository.revisions.index(self.raw_id)
 

	
 
        self.nodes = {}
 
        self._paths = {}
 

	
 
    @LazyProperty
 
    def bookmarks(self):
 
        return ()
 

	
 
    @LazyProperty
 
    def message(self):
 
        return safe_str(self._commit.message)
 

	
 
    @LazyProperty
 
    def committer(self):
 
        return safe_str(getattr(self._commit, self._committer_property))
 

	
 
    @LazyProperty
 
    def author(self):
 
        return safe_str(getattr(self._commit, self._author_property))
 

	
 
    @LazyProperty
 
    def date(self):
 
        return date_fromtimestamp(getattr(self._commit, self._date_property),
 
                                  getattr(self._commit, self._date_tz_property))
 

	
 
    @LazyProperty
 
    def _timestamp(self):
 
        return getattr(self._commit, self._date_property)
 

	
 
    @LazyProperty
 
    def status(self):
 
        """
 
        Returns modified, added, removed, deleted files for current changeset
 
        """
 
        return self.changed, self.added, self.removed
 

	
 
    @LazyProperty
 
    def tags(self):
 
        _tags = []
 
        for tname, tsha in self.repository.tags.items():
 
            if tsha == self.raw_id:
 
                _tags.append(tname)
 
        return _tags
 

	
 
    @LazyProperty
 
    def branch(self):
 
        # Note: This function will return one branch name for the changeset -
 
        # that might not make sense in Git where branches() is a better match
 
        # for the basic model
 
        heads = self.repository._heads(reverse=False)
 
        ref = heads.get(self._commit.id)
 
        if ref:
 
            return safe_str(ref)
 

	
 
    @LazyProperty
 
    def branches(self):
 
        heads = self.repository._heads(reverse=True)
 
        return [safe_str(b) for b in heads if heads[b] == self._commit.id] # FIXME: Inefficient ... and returning None!
 

	
 
    def _get_id_for_path(self, path):
 
        # FIXME: Please, spare a couple of minutes and make this code cleaner;
 
        if path not in self._paths:
 
            path = path.strip('/')
 
            # set root tree
 
            tree = self.repository._repo[self._tree_id]
 
            if path == '':
 
                self._paths[''] = tree.id
 
                return tree.id
 
            splitted = path.split('/')
 
            dirs, name = splitted[:-1], splitted[-1]
 
            curdir = ''
 

	
 
            # initially extract things from root dir
 
            for item, stat, id in tree.items():
 
                name = safe_str(item)
 
                if curdir:
 
                    name = '/'.join((curdir, name))
 
                self._paths[name] = id
 
                self._stat_modes[name] = stat
 

	
 
            for dir in dirs:
 
                if curdir:
 
                    curdir = '/'.join((curdir, dir))
 
                else:
 
                    curdir = dir
 
                dir_id = None
 
                for item, stat, id in tree.items():
 
                    name = safe_str(item)
 
                    if dir == name:
 
                        dir_id = id
 
                if dir_id:
 
                    # Update tree
 
                    tree = self.repository._repo[dir_id]
 
                    if not isinstance(tree, objects.Tree):
 
                        raise ChangesetError('%s is not a directory' % curdir)
 
                else:
 
                    raise ChangesetError('%s have not been found' % curdir)
 

	
 
                # cache all items from the given traversed tree
 
                for item, stat, id in tree.items():
 
                    name = safe_str(item)
 
                    if curdir:
 
                        name = '/'.join((curdir, name))
 
                    self._paths[name] = id
 
                    self._stat_modes[name] = stat
 
            if path not in self._paths:
 
                raise NodeDoesNotExistError("There is no file nor directory "
 
                    "at the given path '%s' at revision %s"
 
                    % (path, self.short_id))
 
        return self._paths[path]
 

	
 
    def _get_kind(self, path):
 
        obj = self.repository._repo[self._get_id_for_path(path)]
 
        if isinstance(obj, objects.Blob):
 
            return NodeKind.FILE
 
        elif isinstance(obj, objects.Tree):
 
            return NodeKind.DIR
 

	
 
    def _get_filectx(self, path):
 
        path = path.rstrip('/')
 
        if self._get_kind(path) != NodeKind.FILE:
 
            raise ChangesetError("File does not exist for revision %s at "
 
                " '%s'" % (self.raw_id, path))
 
        return path
 

	
 
    def _get_file_nodes(self):
 
        return chain(*(t[2] for t in self.walk()))
 

	
 
    @LazyProperty
 
    def parents(self):
 
        """
 
        Returns list of parents changesets.
 
        """
 
        return [self.repository.get_changeset(ascii_str(parent_id))
 
                for parent_id in self._commit.parents]
 

	
 
    @LazyProperty
 
    def children(self):
 
        """
 
        Returns list of children changesets.
 
        """
 
        rev_filter = settings.GIT_REV_FILTER
 
        so = self.repository.run_git_command(
 
            ['rev-list', rev_filter, '--children']
 
        )
 
        return [
 
            self.repository.get_changeset(cs)
 
            for parts in (l.split(' ') for l in so.splitlines())
 
            if parts[0] == self.raw_id
 
            for cs in parts[1:]
 
        ]
 

	
 
    def next(self, branch=None):
 
        if branch and self.branch != branch:
 
            raise VCSError('Branch option used on changeset not belonging '
 
                           'to that branch')
 

	
 
        cs = self
 
        while True:
 
            try:
 
                next_ = cs.revision + 1
 
                next_rev = cs.repository.revisions[next_]
 
            except IndexError:
 
                raise ChangesetDoesNotExistError
 
            cs = cs.repository.get_changeset(next_rev)
 

	
 
            if not branch or branch == cs.branch:
 
                return cs
 

	
 
    def prev(self, branch=None):
 
        if branch and self.branch != branch:
 
            raise VCSError('Branch option used on changeset not belonging '
 
                           'to that branch')
 

	
 
        cs = self
 
        while True:
 
            try:
 
                prev_ = cs.revision - 1
 
                if prev_ < 0:
 
                    raise IndexError
 
                prev_rev = cs.repository.revisions[prev_]
 
            except IndexError:
 
                raise ChangesetDoesNotExistError
 
            cs = cs.repository.get_changeset(prev_rev)
 

	
 
            if not branch or branch == cs.branch:
 
                return cs
 

	
 
    def diff(self, ignore_whitespace=True, context=3):
 
        # Only used to feed diffstat
 
        rev1 = self.parents[0] if self.parents else self.repository.EMPTY_CHANGESET
 
        rev2 = self
 
        return b''.join(self.repository.get_diff(rev1, rev2,
 
                                    ignore_whitespace=ignore_whitespace,
 
                                    context=context))
 

	
 
    def get_file_mode(self, path):
 
        """
 
        Returns stat mode of the file at the given ``path``.
 
        """
 
        # ensure path is traversed
 
        self._get_id_for_path(path)
 
        return self._stat_modes[path]
 

	
 
    def get_file_content(self, path):
 
        """
 
        Returns content of the file at given ``path``.
 
        """
 
        id = self._get_id_for_path(path)
 
        blob = self.repository._repo[id]
 
        return blob.as_pretty_string()
 

	
 
    def get_file_size(self, path):
 
        """
 
        Returns size of the file at given ``path``.
 
        """
 
        id = self._get_id_for_path(path)
 
        blob = self.repository._repo[id]
 
        return blob.raw_length()
 

	
 
    def get_file_changeset(self, path):
 
        """
 
        Returns last commit of the file at the given ``path``.
 
        """
 
        return self.get_file_history(path, limit=1)[0]
 

	
 
    def get_file_history(self, path, limit=None):
 
        """
 
        Returns history of file as reversed list of ``Changeset`` objects for
 
        which file at given ``path`` has been modified.
 

	
 
        TODO: This function now uses the underlying 'git' and 'grep' commands,
        which is generally not good. Should be replaced with an algorithm
        iterating over commits.
 
        """
 
        self._get_filectx(path)
 

	
 
        if limit is not None:
 
            cmd = ['log', '-n', str(safe_int(limit, 0)),
 
                   '--pretty=format:%H', '-s', self.raw_id, '--', path]
 

	
 
        else:
 
            cmd = ['log',
 
                   '--pretty=format:%H', '-s', self.raw_id, '--', path]
 
        so = self.repository.run_git_command(cmd)
 
        ids = re.findall(r'[0-9a-fA-F]{40}', so)
 
        return [self.repository.get_changeset(sha) for sha in ids]
 

	
 
    def get_file_history_2(self, path):
 
        """
 
        Returns history of file as reversed list of ``Changeset`` objects for
 
        which file at given ``path`` has been modified.
 

	
 
        """
 
        self._get_filectx(path)
 
        from dulwich.walk import Walker
 
        include = [self.raw_id]
 
        walker = Walker(self.repository._repo.object_store, include,
 
                        paths=[path], max_entries=1)
 
        return [self.repository.get_changeset(ascii_str(x.commit.id))
 
                for x in walker]
 

	
 
    def get_file_annotate(self, path):
 
        """
 
        Returns a generator of four element tuples with
 
            lineno, sha, changeset lazy loader and line
 
        """
 
        # TODO: This function now uses the underlying 'git' command, which is
        # generally not good. Should be replaced with an algorithm iterating
        # over commits.
 
        cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
 
        # -l     ==> outputs long shas (and we need all 40 characters)
 
        # --root ==> doesn't put '^' character for boundaries
 
        # -r sha ==> blames for the given revision
 
        so = self.repository.run_git_command(cmd)
 

	
 
        for i, blame_line in enumerate(so.split('\n')[:-1]):
 
            sha, line = re.split(r' ', blame_line, 1)
 
            yield (i + 1, sha, lambda sha=sha: self.repository.get_changeset(sha), line)
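
A hedged usage sketch (not part of this changeset) of consuming the annotate generator; the repository path and file name are placeholders, and the third tuple element is the lazy changeset loader yielded above.

from kallithea.lib.vcs.backends.git import GitRepository

repo = GitRepository('/path/to/repo.git')   # placeholder path
tip = repo.get_changeset()
for lineno, sha, changeset_loader, line in tip.get_file_annotate('setup.py'):
    # changeset_loader() fetches the full changeset for `sha` only when needed
    print(lineno, sha[:12], line)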
 

	
 
    def fill_archive(self, stream=None, kind='tgz', prefix=None,
 
                     subrepos=False):
 
        """
 
        Fills up given stream.
 

	
 
        :param stream: file like object.
 
        :param kind: one of the following: ``zip``, ``tgz`` or ``tbz2``.
 
            Default: ``tgz``.
 
        :param prefix: name of root directory in archive.
 
            Default is repository name and changeset's raw_id joined with dash
 
            (``repo-tip.<KIND>``).
 
        :param subrepos: include subrepos in this archive.
 

	
 
        :raise ImproperArchiveTypeError: If given kind is wrong.
 
        :raise VCSError: If given stream is None
 
        """
 
        allowed_kinds = settings.ARCHIVE_SPECS
 
        if kind not in allowed_kinds:
 
            raise ImproperArchiveTypeError('Archive kind not supported, use one'
                ' of %s' % ' '.join(allowed_kinds))
 

	
 
        if stream is None:
 
            raise VCSError('You need to pass in a valid stream for filling'
 
                           ' with archival data')
 

	
 
        if prefix is None:
 
            prefix = '%s-%s' % (self.repository.name, self.short_id)
 
        elif prefix.startswith('/'):
 
            raise VCSError("Prefix cannot start with leading slash")
 
        elif prefix.strip() == '':
 
            raise VCSError("Prefix cannot be empty")
 

	
 
        if kind == 'zip':
 
            frmt = 'zip'
 
        else:
 
            frmt = 'tar'
 
        _git_path = settings.GIT_EXECUTABLE_PATH
 
        cmd = '%s archive --format=%s --prefix=%s/ %s' % (_git_path,
 
                                                frmt, prefix, self.raw_id)
 
        if kind == 'tgz':
 
            cmd += ' | gzip -9'
 
        elif kind == 'tbz2':
 
            cmd += ' | bzip2 -9'
 

	
 
        if stream is None:
 
            raise VCSError('You need to pass in a valid stream for filling'
 
                           ' with archival data')
 
        popen = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True,
 
                      cwd=self.repository.path)
 

	
 
        buffer_size = 1024 * 8
 
        chunk = popen.stdout.read(buffer_size)
 
        while chunk:
 
            stream.write(chunk)
 
            chunk = popen.stdout.read(buffer_size)
 
        # Make sure all descriptors would be read
 
        popen.communicate()
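
A minimal sketch (not part of this changeset) of feeding fill_archive() with an in-memory stream; the repository path and prefix are placeholders.

from io import BytesIO

from kallithea.lib.vcs.backends.git import GitRepository

repo = GitRepository('/path/to/repo.git')   # placeholder path
buf = BytesIO()
repo.get_changeset().fill_archive(stream=buf, kind='tgz', prefix='myrepo-tip')
# buf.getvalue() now holds the gzipped tar produced by `git archive | gzip -9`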
 

	
 
    def get_nodes(self, path):
 
        """
 
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
 
        state of changeset at the given ``path``. If node at the given ``path``
 
        is not an instance of ``DirNode``, ``ChangesetError`` would be raised.
 
        """
 

	
 
        if self._get_kind(path) != NodeKind.DIR:
 
            raise ChangesetError("Directory does not exist for revision %s at "
 
                " '%s'" % (self.revision, path))
 
        path = path.rstrip('/')
 
        id = self._get_id_for_path(path)
 
        tree = self.repository._repo[id]
 
        dirnodes = []
 
        filenodes = []
 
        als = self.repository.alias
 
        for name, stat, id in tree.items():
 
            obj_path = safe_str(name)
 
            if path != '':
 
                obj_path = '/'.join((path, obj_path))
 
            if objects.S_ISGITLINK(stat):
 
                root_tree = self.repository._repo[self._tree_id]
 
                cf = ConfigFile.from_file(BytesIO(self.repository._repo.get_object(root_tree[b'.gitmodules'][1]).data))
 
                url = ascii_str(cf.get(('submodule', obj_path), 'url'))
 
                dirnodes.append(SubModuleNode(obj_path, url=url, changeset=ascii_str(id),
 
                                              alias=als))
 
                continue
 

	
 
            obj = self.repository._repo.get_object(id)
 
            if obj_path not in self._stat_modes:
 
                self._stat_modes[obj_path] = stat
 
            if isinstance(obj, objects.Tree):
 
                dirnodes.append(DirNode(obj_path, changeset=self))
 
            elif isinstance(obj, objects.Blob):
 
                filenodes.append(FileNode(obj_path, changeset=self, mode=stat))
 
            else:
 
                raise ChangesetError("Requested object should be Tree "
 
                                     "or Blob, is %r" % type(obj))
 
        nodes = dirnodes + filenodes
 
        for node in nodes:
 
            if node.path not in self.nodes:
 
                self.nodes[node.path] = node
 
        nodes.sort()
 
        return nodes
 

	
 
    def get_node(self, path):
 
        """
 
        Returns ``Node`` object from the given ``path``. If there is no node at
 
        the given ``path``, ``ChangesetError`` would be raised.
 
        """
 
        path = path.rstrip('/')
 
        if path not in self.nodes:
 
            try:
 
                id_ = self._get_id_for_path(path)
 
            except ChangesetError:
 
                raise NodeDoesNotExistError("Cannot find one of parents' "
 
                    "directories for a given path: %s" % path)
 

	
 
            stat = self._stat_modes.get(path)
            if stat and objects.S_ISGITLINK(stat):
 
                tree = self.repository._repo[self._tree_id]
 
                cf = ConfigFile.from_file(BytesIO(self.repository._repo.get_object(tree[b'.gitmodules'][1]).data))
 
                url = ascii_str(cf.get(('submodule', path), 'url'))
 
                node = SubModuleNode(path, url=url, changeset=ascii_str(id_),
 
                                     alias=self.repository.alias)
 
            else:
 
                obj = self.repository._repo.get_object(id_)
 

	
 
                if isinstance(obj, objects.Tree):
 
                    if path == '':
 
                        node = RootNode(changeset=self)
 
                    else:
 
                        node = DirNode(path, changeset=self)
 
                    node._tree = obj
 
                elif isinstance(obj, objects.Blob):
 
                    node = FileNode(path, changeset=self)
 
                    node._blob = obj
 
                else:
 
                    raise NodeDoesNotExistError("There is no file nor directory "
 
                        "at the given path: '%s' at revision %s"
 
                        % (path, self.short_id))
 
            # cache node
 
            self.nodes[path] = node
 
        return self.nodes[path]
 

	
 
    @LazyProperty
 
    def affected_files(self):
 
        """
 
        Gets fast accessible file changes for the given changeset
 
        """
 
        added, modified, deleted = self._changes_cache
 
        return list(added.union(modified).union(deleted))
 

	
 
    @LazyProperty
 
    def _changes_cache(self):
 
        added = set()
 
        modified = set()
 
        deleted = set()
 
        _r = self.repository._repo
 

	
 
        parents = self.parents
 
        if not self.parents:
 
            parents = [EmptyChangeset()]
 
        for parent in parents:
 
            if isinstance(parent, EmptyChangeset):
 
                oid = None
 
            else:
 
                oid = _r[parent._commit.id].tree
 
            changes = _r.object_store.tree_changes(oid, _r[self._commit.id].tree)
 
            for (oldpath, newpath), (_, _), (_, _) in changes:
 
                if newpath and oldpath:
 
                    modified.add(safe_str(newpath))
 
                elif newpath and not oldpath:
 
                    added.add(safe_str(newpath))
 
                elif not newpath and oldpath:
 
                    deleted.add(safe_str(oldpath))
 
        return added, modified, deleted
 

	
 
    def _get_paths_for_status(self, status):
 
        """
 
        Returns sorted list of paths for given ``status``.
 

	
 
        :param status: one of: *added*, *modified* or *deleted*
 
        """
 
        added, modified, deleted = self._changes_cache
 
        return sorted({
 
            'added': list(added),
 
            'modified': list(modified),
 
            'deleted': list(deleted)}[status]
 
        )
 

	
 
    @LazyProperty
 
    def added(self):
 
        """
 
        Returns list of added ``FileNode`` objects.
 
        """
 
        if not self.parents:
 
            return list(self._get_file_nodes())
 
        return AddedFileNodesGenerator([n for n in
 
                                self._get_paths_for_status('added')], self)
 

	
 
    @LazyProperty
 
    def changed(self):
 
        """
 
        Returns list of modified ``FileNode`` objects.
 
        """
 
        if not self.parents:
 
            return []
 
        return ChangedFileNodesGenerator([n for n in
 
                                self._get_paths_for_status('modified')], self)
 

	
 
    @LazyProperty
 
    def removed(self):
 
        """
 
        Returns list of removed ``FileNode`` objects.
 
        """
 
        if not self.parents:
 
            return []
 
        return RemovedFileNodesGenerator([n for n in
 
                                self._get_paths_for_status('deleted')], self)
 

	
 
    extra = {}
kallithea/lib/vcs/backends/git/repository.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    vcs.backends.git.repository
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    Git repository implementation.
 

	
 
    :created_on: Apr 8, 2010
 
    :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
 
"""
 

	
 
import errno
 
import logging
 
import os
 
import re
 
import time
 
import urllib.error
 
import urllib.parse
 
import urllib.request
 
from collections import OrderedDict
 

	
 
import mercurial.url  # import httpbasicauthhandler, httpdigestauthhandler
 
import mercurial.util  # import url as hg_url
 
from dulwich.config import ConfigFile
 
from dulwich.objects import Tag
 
from dulwich.repo import NotGitRepository, Repo
 

	
 
from kallithea.lib.vcs import subprocessio
 
from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator
 
from kallithea.lib.vcs.conf import settings
 
from kallithea.lib.vcs.exceptions import (
 
    BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, TagDoesNotExistError)
 
from kallithea.lib.vcs.utils import ascii_str, date_fromtimestamp, makedate, safe_bytes, safe_str
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.lib.vcs.utils.paths import abspath, get_user_home
 

	
 
from .changeset import GitChangeset
 
from .inmemory import GitInMemoryChangeset
 
from .workdir import GitWorkdir
 

	
 

	
 
SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class GitRepository(BaseRepository):
 
    """
 
    Git repository backend.
 
    """
 
    DEFAULT_BRANCH_NAME = 'master'
 
    scm = 'git'
 

	
 
    def __init__(self, repo_path, create=False, src_url=None,
 
                 update_after_clone=False, bare=False):
 

	
 
        self.path = abspath(repo_path)
 
        self.repo = self._get_repo(create, src_url, update_after_clone, bare)
 
        self.bare = self.repo.bare
 

	
 
    @property
 
    def _config_files(self):
 
        return [
 
            self.bare and abspath(self.path, 'config')
 
                      or abspath(self.path, '.git', 'config'),
 
             abspath(get_user_home(), '.gitconfig'),
 
         ]
 

	
 
    @property
 
    def _repo(self):
 
        return self.repo
 

	
 
    @property
 
    def head(self):
 
        try:
 
            return self._repo.head()
 
        except KeyError:
 
            return None
 

	
 
    @property
 
    def _empty(self):
 
        """
 
        Checks if repository is empty, i.e. without any changesets
 
        """
 

	
 
        try:
 
            self.revisions[0]
 
        except (KeyError, IndexError):
 
            return True
 
        return False
 

	
 
    @LazyProperty
 
    def revisions(self):
 
        """
 
        Returns list of revisions' ids, in ascending order. Being a lazy
        attribute allows external tools to inject shas from cache.
 
        """
 
        return self._get_all_revisions()
 

	
 
    @classmethod
 
    def _run_git_command(cls, cmd, cwd=None):
 
        """
 
        Runs given ``cmd`` as git command and returns output bytes in a tuple
 
        (stdout, stderr) ... or raise RepositoryError.
 

	
 
        :param cmd: git command to be executed
 
        :param cwd: passed directly to subprocess
 
        """
 
        # need to clean/fix GIT_DIR!
 
        gitenv = dict(os.environ)
 
        gitenv.pop('GIT_DIR', None)
 
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
 

	
 
        assert isinstance(cmd, list), cmd
 
        cmd = [settings.GIT_EXECUTABLE_PATH, '-c', 'core.quotepath=false'] + cmd
 
        try:
 
            p = subprocessio.SubprocessIOChunker(cmd, cwd=cwd, env=gitenv, shell=False)
 
        except (EnvironmentError, OSError) as err:
 
            # output from the failing process is in str(EnvironmentError)
 
            msg = ("Couldn't run git command %s.\n"
 
                   "Subprocess failed with '%s': %s\n" %
 
                   (cmd, type(err).__name__, err)
 
            ).strip()
 
            log.error(msg)
 
            raise RepositoryError(msg)
 

	
 
        try:
 
            stdout = b''.join(p.output)
 
            stderr = b''.join(p.error)
 
        finally:
 
            p.close()
 
        # TODO: introduce option to make commands fail if they have any stderr output?
 
        if stderr:
 
            log.debug('stderr from %s:\n%s', cmd, stderr)
 
        else:
 
            log.debug('stderr from %s: None', cmd)
 
        return stdout, stderr
 

	
 
    def run_git_command(self, cmd):
 
        """
 
        Runs given ``cmd`` as git command with cwd set to current repo.
 
        Returns stdout as unicode str ... or raise RepositoryError.
 
        """
 
        cwd = None
 
        if os.path.isdir(self.path):
 
            cwd = self.path
 
        stdout, _stderr = self._run_git_command(cmd, cwd=cwd)
 
        return safe_str(stdout)
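
A short sketch (not part of this changeset) contrasting the two command helpers; the repository path is a placeholder.

from kallithea.lib.vcs.backends.git import GitRepository

repo = GitRepository('/path/to/repo.git')   # placeholder path
head_sha = repo.run_git_command(['rev-parse', 'HEAD']).strip()   # unicode str
stdout, stderr = GitRepository._run_git_command(['--version'])   # raw bytes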
 

	
 
    @classmethod
 
    def _check_url(cls, url):
 
        """
 
        Function will check given url and try to verify if it's a valid
        link. Sometimes it may happen that git will issue a basic
        auth request that can cause the whole API to hang when used from python
        or other external calls.
 

	
 
        On failures it'll raise urllib.error.URLError; an exception is also
        raised when the return code is not 200
 
        """
 
        # check first if it's not a local url
 
        if os.path.isdir(url) or url.startswith('file:'):
 
            return True
 

	
 
        if url.startswith('git://'):
 
            return True
 

	
 
        if '+' in url[:url.find('://')]:
 
            url = url[url.find('+') + 1:]
 

	
 
        handlers = []
 
        url_obj = mercurial.util.url(safe_bytes(url))
 
        test_uri, authinfo = url_obj.authinfo()
 
        if not test_uri.endswith('info/refs'):
 
            test_uri = test_uri.rstrip('/') + '/info/refs'
 

	
 
        url_obj.passwd = b'*****'
 
        cleaned_uri = str(url_obj)
 

	
 
        if authinfo:
 
            # create a password manager
 
            passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
 
            passmgr.add_password(*authinfo)
 

	
 
            handlers.extend((mercurial.url.httpbasicauthhandler(passmgr),
 
                             mercurial.url.httpdigestauthhandler(passmgr)))
 

	
 
        o = urllib.request.build_opener(*handlers)
 
        o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git
 

	
 
        req = urllib.request.Request(
 
            "%s?%s" % (
 
                test_uri,
 
                urllib.parse.urlencode({"service": 'git-upload-pack'})
 
            ))
 

	
 
        try:
 
            resp = o.open(req)
 
            if resp.code != 200:
 
                raise Exception('Return Code is not 200')
 
        except Exception as e:
 
            # means it cannot be cloned
 
            raise urllib.error.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
 

	
 
        # now detect if it's proper git repo
 
        gitdata = resp.read()
 
        if 'service=git-upload-pack' not in gitdata:
 
            raise urllib.error.URLError(
 
                "url [%s] does not look like an git" % cleaned_uri)
 

	
 
        return True
 

	
 
    def _get_repo(self, create, src_url=None, update_after_clone=False,
 
                  bare=False):
 
        if create and os.path.exists(self.path):
 
            raise RepositoryError("Location already exist")
 
        if src_url and not create:
 
            raise RepositoryError("Create should be set to True if src_url is "
 
                                  "given (clone operation creates repository)")
 
        try:
 
            if create and src_url:
 
                GitRepository._check_url(src_url)
 
                self.clone(src_url, update_after_clone, bare)
 
                return Repo(self.path)
 
            elif create:
 
                os.makedirs(self.path)
 
                if bare:
 
                    return Repo.init_bare(self.path)
 
                else:
 
                    return Repo.init(self.path)
 
            else:
 
                return Repo(self.path)
 
        except (NotGitRepository, OSError) as err:
 
            raise RepositoryError(err)
 

	
 
    def _get_all_revisions(self):
 
        # we must check if this repo is not empty, since later command
 
        # fails if it is. And it's cheaper to ask than throw the subprocess
 
        # errors
 
        try:
 
            self._repo.head()
 
        except KeyError:
 
            return []
 

	
 
        rev_filter = settings.GIT_REV_FILTER
 
        cmd = ['rev-list', rev_filter, '--reverse', '--date-order']
 
        try:
 
            so = self.run_git_command(cmd)
 
        except RepositoryError:
 
            # Can be raised for empty repositories
 
            return []
 
        return so.splitlines()
 

	
 
    def _get_all_revisions2(self):
 
        # alternate implementation using dulwich
 
        includes = [ascii_str(sha) for key, (sha, type_) in self._parsed_refs.items()
 
                    if type_ != b'T']
 
        return [c.commit.id for c in self._repo.get_walker(include=includes)]
 

	
 
    def _get_revision(self, revision):
 
        """
 
        Given any revision identifier, returns a 40 char string with revision hash.
 
        """
 
        if self._empty:
 
            raise EmptyRepositoryError("There are no changesets yet")
 

	
 
        if revision in (None, '', 'tip', 'HEAD', 'head', -1):
 
            revision = -1
 

	
 
        if isinstance(revision, int):
 
            try:
 
                return self.revisions[revision]
 
            except IndexError:
 
                msg = "Revision %r does not exist for %s" % (revision, self.name)
 
                raise ChangesetDoesNotExistError(msg)
 

	
 
        if isinstance(revision, str):
 
            if revision.isdigit() and (len(revision) < 12 or len(revision) == revision.count('0')):
 
                try:
 
                    return self.revisions[int(revision)]
 
                except IndexError:
 
                    msg = "Revision %r does not exist for %s" % (revision, self)
 
                    raise ChangesetDoesNotExistError(msg)
 

	
 
            # get by branch/tag name
 
            _ref_revision = self._parsed_refs.get(safe_bytes(revision))
 
            if _ref_revision:  # and _ref_revision[1] in [b'H', b'RH', b'T']:
 
                return ascii_str(_ref_revision[0])
 

	
 
            if revision in self.revisions:
 
                return revision
 

	
 
            # maybe it's a tag ? we don't have them in self.revisions
 
            if revision in self.tags.values():
 
                return revision
 

	
 
            if SHA_PATTERN.match(revision):
 
                msg = "Revision %r does not exist for %s" % (revision, self.name)
 
                raise ChangesetDoesNotExistError(msg)
 

	
 
        raise ChangesetDoesNotExistError("Given revision %r not recognized" % revision)
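
For review context, a sketch (not part of this changeset) of the identifier forms _get_revision() accepts, assuming `repo` is a non-empty GitRepository with a 'master' ref; each call returns a 40-character hash string or raises ChangesetDoesNotExistError.

repo._get_revision(None)       # tip, equivalent to 'HEAD' or -1
repo._get_revision(0)          # first revision, by integer index
repo._get_revision('0')        # short numeric strings are treated as indexes
repo._get_revision('master')   # branch or tag name resolved via _parsed_refs
repo._get_revision('f' * 40)   # full sha; raises if not a known revision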
 

	
 
    def get_ref_revision(self, ref_type, ref_name):
 
        """
 
        Returns the revision (40-char hash) that the given ``ref_type`` and
        ``ref_name`` point at.
 
        """
 
        return self._get_revision(ref_name)
 

	
 
    def _get_archives(self, archive_name='tip'):
 

	
 
        for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
 
            yield {"type": i[0], "extension": i[1], "node": archive_name}
 

	
 
    def _get_url(self, url):
 
        """
 
        Returns normalized url. If schema is not given, would fall back to
 
        filesystem (``file:///``) schema.
 
        """
 
        if url != 'default' and '://' not in url:
 
            url = ':///'.join(('file', url))
 
        return url
 

	
 
    @LazyProperty
 
    def name(self):
 
        return os.path.basename(self.path)
 

	
 
    @LazyProperty
 
    def last_change(self):
 
        """
 
        Returns last change made on this repository as datetime object
 
        """
 
        return date_fromtimestamp(self._get_mtime(), makedate()[1])
 

	
 
    def _get_mtime(self):
 
        try:
 
            return time.mktime(self.get_changeset().date.timetuple())
 
        except RepositoryError:
 
            idx_loc = '' if self.bare else '.git'
 
            # fallback to filesystem
 
            in_path = os.path.join(self.path, idx_loc, "index")
 
            he_path = os.path.join(self.path, idx_loc, "HEAD")
 
            if os.path.exists(in_path):
 
                return os.stat(in_path).st_mtime
 
            else:
 
                return os.stat(he_path).st_mtime
 

	
 
    @LazyProperty
 
    def description(self):
 
        return safe_str(self._repo.get_description() or b'unknown')
 

	
 
    @LazyProperty
 
    def contact(self):
 
        undefined_contact = 'Unknown'
 
        return undefined_contact
 

	
 
    @property
 
    def branches(self):
 
        if not self.revisions:
 
            return {}
 
        _branches = [(safe_str(key), ascii_str(sha))
                     for key, (sha, type_) in self._parsed_refs.items() if type_ == b'H']
        return OrderedDict(sorted(_branches, key=(lambda ctx: ctx[0]), reverse=False))
 

	
 
    @LazyProperty
 
    def closed_branches(self):
 
        return {}
 

	
 
    @LazyProperty
 
    def tags(self):
 
        return self._get_tags()
 

	
 
    def _get_tags(self):
 
        if not self.revisions:
 
            return {}
 

	
 
        _tags = [(safe_str(key), ascii_str(sha))
                 for key, (sha, type_) in self._parsed_refs.items() if type_ == b'T']
        return OrderedDict(sorted(_tags, key=(lambda ctx: ctx[0]), reverse=True))
 

	
 
    def tag(self, name, user, revision=None, message=None, date=None,
 
            **kwargs):
 
        """
 
        Creates and returns a tag for the given ``revision``.
 

	
 
        :param name: name for new tag
 
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
 
        :param revision: changeset id for which new tag would be created
 
        :param message: message of the tag's commit
 
        :param date: date of tag's commit
 

	
 
        :raises TagAlreadyExistError: if tag with same name already exists
 
        """
 
        if name in self.tags:
 
            raise TagAlreadyExistError("Tag %s already exists" % name)
 
        changeset = self.get_changeset(revision)
 
        message = message or "Added tag %s for commit %s" % (name,
 
            changeset.raw_id)
 
        self._repo.refs[b"refs/tags/%s" % safe_bytes(name)] = changeset._commit.id
 

	
 
        self._parsed_refs = self._get_parsed_refs()
 
        self.tags = self._get_tags()
 
        return changeset
 

	
 
    def remove_tag(self, name, user, message=None, date=None):
 
        """
 
        Removes tag with the given ``name``.
 

	
 
        :param name: name of the tag to be removed
 
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
 
        :param message: message of the tag's removal commit
 
        :param date: date of tag's removal commit
 

	
 
        :raises TagDoesNotExistError: if tag with given name does not exist
 
        """
 
        if name not in self.tags:
 
            raise TagDoesNotExistError("Tag %s does not exist" % name)
 
        # self._repo.refs is a DiskRefsContainer, and .path gives the full absolute path of '.git'
 
        tagpath = os.path.join(safe_str(self._repo.refs.path), 'refs', 'tags', name)
 
        try:
 
            os.remove(tagpath)
 
            self._parsed_refs = self._get_parsed_refs()
 
            self.tags = self._get_tags()
 
        except OSError as e:
 
            raise RepositoryError(e.strerror)
 

	
 
    @LazyProperty
 
    def bookmarks(self):
 
        """
 
        Gets bookmarks for this repository
 
        """
 
        return {}
 

	
 
    @LazyProperty
 
    def _parsed_refs(self):
 
        return self._get_parsed_refs()
 

	
 
    def _get_parsed_refs(self):
 
        """Return refs as a dict, like:
 
        { b'v0.2.0': [b'599ba911aa24d2981225f3966eb659dfae9e9f30', b'T'] }
 
        """
 
        _repo = self._repo
 
        refs = _repo.get_refs()
 
        keys = [(b'refs/heads/', b'H'),
 
                (b'refs/remotes/origin/', b'RH'),
 
                (b'refs/tags/', b'T')]
 
        _refs = {}
 
        for ref, sha in refs.items():
 
            for k, type_ in keys:
 
                if ref.startswith(k):
 
                    _key = ref[len(k):]
 
                    if type_ == b'T':
 
                        obj = _repo.get_object(sha)
 
                        if isinstance(obj, Tag):
 
                            sha = _repo.get_object(sha).object[1]
 
                    _refs[_key] = [sha, type_]
 
                    break
 
        return _refs
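
A self-contained sketch (not part of this changeset) of the mapping shape documented above and how the branches/tags properties consume it; the sha is the one from the docstring example.

parsed_refs = {
    b'master': [b'599ba911aa24d2981225f3966eb659dfae9e9f30', b'H'],   # branch head
    b'v0.2.0': [b'599ba911aa24d2981225f3966eb659dfae9e9f30', b'T'],   # peeled tag
}
branches = {key: sha for key, (sha, type_) in parsed_refs.items() if type_ == b'H'}
tags = {key: sha for key, (sha, type_) in parsed_refs.items() if type_ == b'T'}
assert b'master' in branches and b'v0.2.0' in tags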
 

	
 
    def _heads(self, reverse=False):
 
        refs = self._repo.get_refs()
 
        heads = {}
 

	
 
        for key, val in refs.items():
 
            for ref_key in [b'refs/heads/', b'refs/remotes/origin/']:
 
                if key.startswith(ref_key):
 
                    n = key[len(ref_key):]
 
                    if n not in [b'HEAD']:
 
                        heads[n] = val
 

	
 
        return heads if reverse else dict((y, x) for x, y in heads.items())
 

	
 
    def get_changeset(self, revision=None):
 
        """
 
        Returns ``GitChangeset`` object representing commit from git repository
 
        at the given revision or head (most recent commit) if None given.
 
        """
 
        if isinstance(revision, GitChangeset):
 
            return revision
 
        return GitChangeset(repository=self, revision=self._get_revision(revision))
 

	
 
    def get_changesets(self, start=None, end=None, start_date=None,
 
           end_date=None, branch_name=None, reverse=False, max_revisions=None):
 
        """
 
        Returns iterator of ``GitChangeset`` objects from start to end (both
 
        are inclusive), in ascending date order (unless ``reverse`` is set).
 

	
 
        :param start: changeset ID, as str; first returned changeset
 
        :param end: changeset ID, as str; last returned changeset
 
        :param start_date: if specified, changesets with commit date less than
 
          ``start_date`` would be filtered out from returned set
 
        :param end_date: if specified, changesets with commit date greater than
 
          ``end_date`` would be filtered out from returned set
 
        :param branch_name: if specified, changesets not reachable from given
 
          branch would be filtered out from returned set
 
        :param reverse: if ``True``, returned generator would be reversed
 
          (meaning that returned changesets would have descending date order)
 

	
 
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
 
            exist.
 
        :raise ChangesetDoesNotExistError: If changeset for given ``start`` or
 
          ``end`` could not be found.
 

	
 
        """
 
        if branch_name and branch_name not in self.branches:
 
            raise BranchDoesNotExistError("Branch '%s' not found"
 
                                          % branch_name)
 
        # actually we should check now if it's not an empty repo to not spawn
        # subprocess commands
 
        if self._empty:
 
            raise EmptyRepositoryError("There are no changesets yet")
 

	
 
        # %H in the format means (full) commit hash; initial hashes are retrieved
 
        # in ascending date order
 
        cmd = ['log', '--date-order', '--reverse', '--pretty=format:%H']
 
        if max_revisions:
 
            cmd += ['--max-count=%s' % max_revisions]
 
        if start_date:
 
            cmd += ['--since', start_date.strftime('%m/%d/%y %H:%M:%S')]
 
        if end_date:
 
            cmd += ['--until', end_date.strftime('%m/%d/%y %H:%M:%S')]
 
        if branch_name:
 
            cmd.append(branch_name)
 
        else:
 
            cmd.append(settings.GIT_REV_FILTER)
 

	
 
        revs = self.run_git_command(cmd).splitlines()
 
        start_pos = 0
 
        end_pos = len(revs)
 
        if start:
 
            _start = self._get_revision(start)
 
            try:
 
                start_pos = revs.index(_start)
 
            except ValueError:
 
                pass
 

	
 
        if end is not None:
 
            _end = self._get_revision(end)
 
            try:
 
                end_pos = revs.index(_end)
 
            except ValueError:
 
                pass
 

	
 
        if None not in [start, end] and start_pos > end_pos:
 
            raise RepositoryError('start cannot be after end')
 

	
 
        if end_pos is not None:
 
            end_pos += 1
 

	
 
        revs = revs[start_pos:end_pos]
 
        if reverse:
 
            revs.reverse()
 

	
 
        return CollectionGenerator(self, revs)
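
A hedged usage sketch (not part of this changeset); the repository path is a placeholder and a 'master' branch is assumed to exist.

from datetime import datetime

from kallithea.lib.vcs.backends.git import GitRepository

repo = GitRepository('/path/to/repo.git')   # placeholder path
for cs in repo.get_changesets(branch_name='master',
                              start_date=datetime(2019, 1, 1),
                              max_revisions=10, reverse=True):
    print(cs.raw_id, cs.date)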
 

	
 
    def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
 
                 context=3):
 
        """
 
        Returns (git like) *diff*, as plain bytes text. Shows changes
 
        introduced by ``rev2`` since ``rev1``.
 

	
 
        :param rev1: Entry point from which diff is shown. Can be
 
          ``self.EMPTY_CHANGESET`` - in this case, patch showing all
 
          the changes since empty state of the repository until ``rev2``
 
        :param rev2: Until which revision changes should be shown.
 
        :param ignore_whitespace: If set to ``True``, would not show whitespace
 
          changes. Defaults to ``False``.
 
        :param context: How many lines before/after changed lines should be
 
          shown. Defaults to ``3``. Due to limitations in Git, if
 
          value passed-in is greater than ``2**31-1``
 
          (``2147483647``), it will be set to ``2147483647``
 
          instead. If negative value is passed-in, it will be set to
 
          ``0`` instead.
 
        """
 

	
 
        # Git internally uses a signed long int for storing context
 
        # size (number of lines to show before and after the
 
        # differences). This can result in integer overflow, so we
 
        # ensure the requested context is smaller by one than the
 
        # number that would cause the overflow. It is highly unlikely
 
        # that a single file will contain that many lines, so this
 
        # kind of change should not cause any realistic consequences.
 
        overflowed_long_int = 2**31
 

	
 
        if context >= overflowed_long_int:
 
            context = overflowed_long_int - 1
 

	
 
        # Negative context values make no sense, and will result in
 
        # errors. Ensure this does not happen.
 
        if context < 0:
 
            context = 0
 

	
 
        flags = ['-U%s' % context, '--full-index', '--binary', '-p', '-M', '--abbrev=40']
 
        if ignore_whitespace:
 
            flags.append('-w')
 

	
 
        if hasattr(rev1, 'raw_id'):
 
            rev1 = getattr(rev1, 'raw_id')
 

	
 
        if hasattr(rev2, 'raw_id'):
 
            rev2 = getattr(rev2, 'raw_id')
 

	
 
        if rev1 == self.EMPTY_CHANGESET:
 
            rev2 = self.get_changeset(rev2).raw_id
 
            cmd = ['show'] + flags + [rev2]
 
        else:
 
            rev1 = self.get_changeset(rev1).raw_id
 
            rev2 = self.get_changeset(rev2).raw_id
 
            cmd = ['diff'] + flags + [rev1, rev2]
 

	
 
        if path:
 
            cmd += ['--', path]
 

	
 
        stdout, stderr = self._run_git_command(cmd, cwd=self.path)
 
        # If we used 'show' command, strip first few lines (until actual diff
 
        # starts)
 
        if rev1 == self.EMPTY_CHANGESET:
 
            parts = stdout.split(b'\ndiff ', 1)
 
            if len(parts) > 1:
 
                stdout = b'diff ' + parts[1]
 
        return stdout
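
A sketch (not part of this changeset) of both diff modes, assuming `repo` is a non-empty GitRepository; the file path is a placeholder.

first, last = repo.revisions[0], repo.revisions[-1]
patch = repo.get_diff(first, last, path='setup.py', ignore_whitespace=True)  # bytes
everything = repo.get_diff(repo.EMPTY_CHANGESET, last, context=5)  # uses 'git show'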
 

	
 
    @LazyProperty
 
    def in_memory_changeset(self):
 
        """
 
        Returns ``GitInMemoryChangeset`` object for this repository.
 
        """
 
        return GitInMemoryChangeset(self)
 

	
 
    def clone(self, url, update_after_clone=True, bare=False):
 
        """
 
        Tries to clone changes from external location.
 

	
 
        :param update_after_clone: If set to ``False``, git won't checkout
 
          working directory
 
        :param bare: If set to ``True``, repository would be cloned into
 
          *bare* git repository (no working directory at all).
 
        """
 
        url = self._get_url(url)
 
        cmd = ['clone', '-q']
 
        if bare:
 
            cmd.append('--bare')
 
        elif not update_after_clone:
 
            cmd.append('--no-checkout')
 
        cmd += ['--', url, self.path]
 
        # If error occurs run_git_command raises RepositoryError already
 
        self.run_git_command(cmd)
 

	
 
    def pull(self, url):
 
        """
 
        Tries to pull changes from external location.
 
        """
 
        url = self._get_url(url)
 
        cmd = ['pull', '--ff-only', url]
 
        # If error occurs run_git_command raises RepositoryError already
 
        self.run_git_command(cmd)
 

	
 
    def fetch(self, url):
 
        """
 
        Tries to fetch changes from external location.
 
        """
 
        url = self._get_url(url)
 
        so = self.run_git_command(['ls-remote', '-h', url])
 
        cmd = ['fetch', url, '--']
 
        for line in (x for x in so.splitlines()):
 
            sha, ref = line.split('\t')
 
            cmd.append('+%s:%s' % (ref, ref))
 
        self.run_git_command(cmd)
 

	
 
    def _update_server_info(self):
 
        """
 
        runs git's update-server-info command in this repo instance
 
        """
 
        from dulwich.server import update_server_info
 
        try:
 
            update_server_info(self._repo)
 
        except OSError as e:
 
            if e.errno not in [errno.ENOENT, errno.EROFS]:
 
                raise
 
            # Workaround for dulwich crashing on for example its own dulwich/tests/data/repos/simple_merge.git/info/refs.lock
 
            log.error('Ignoring %s running update-server-info: %s', type(e).__name__, e)
 

	
 
    @LazyProperty
 
    def workdir(self):
 
        """
 
        Returns ``Workdir`` instance for this repository.
 
        """
 
        return GitWorkdir(self)
 

	
 
    def get_config_value(self, section, name, config_file=None):
 
        """
 
        Returns configuration value for a given [``section``] and ``name``.
 

	
 
        :param section: Section we want to retrieve value from
 
        :param name: Name of configuration we want to retrieve
 
        :param config_file: A path to file which should be used to retrieve
 
          configuration from (might also be a list of file paths)
 
        """
 
        if config_file is None:
 
            config_file = []
 
        elif isinstance(config_file, str):
 
            config_file = [config_file]
 

	
 
        def gen_configs():
 
            for path in config_file + self._config_files:
 
                try:
 
                    yield ConfigFile.from_path(path)
 
                except (IOError, OSError, ValueError):
 
                    continue
 

	
 
        for config in gen_configs():
 
            try:
 
                value = config.get(section, name)
 
            except KeyError:
 
                continue
 
            return None if value is None else safe_str(value)
 
        return None
 

	
 
    def get_user_name(self, config_file=None):
 
        """
 
        Returns user's name from global configuration file.
 

	
 
        :param config_file: A path to file which should be used to retrieve
 
          configuration from (might also be a list of file paths)
 
        """
 
        return self.get_config_value('user', 'name', config_file)
 

	
 
    def get_user_email(self, config_file=None):
 
        """
 
        Returns user's email from global configuration file.
 

	
 
        :param config_file: A path to file which should be used to retrieve
 
          configuration from (might also be a list of file paths)
 
        """
 
        return self.get_config_value('user', 'email', config_file)
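
A hedged sketch (not part of this changeset) of the config accessors; the repository path and the extra config file are placeholders.

from kallithea.lib.vcs.backends.git import GitRepository

repo = GitRepository('/path/to/repo.git')   # placeholder path
email = repo.get_user_email()               # falls back through repo and global config
name = repo.get_config_value('user', 'name', '/tmp/extra.gitconfig')  # extra file first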
kallithea/lib/vcs/conf/settings.py
Show inline comments
 
import os
 
import tempfile
 

	
 
from kallithea.lib.vcs.utils import aslist
 
from kallithea.lib.vcs.utils.paths import get_user_home
 

	
 

	
 
def abspath(*p):
    return os.path.abspath(os.path.join(*p))
 

	
 
VCSRC_PATH = os.environ.get('VCSRC_PATH')
 

	
 
if not VCSRC_PATH:
 
    HOME_ = get_user_home()
 
    if not HOME_:
 
        HOME_ = tempfile.gettempdir()
 

	
 
VCSRC_PATH = VCSRC_PATH or abspath(HOME_, '.vcsrc')
 
if os.path.isdir(VCSRC_PATH):
 
    VCSRC_PATH = os.path.join(VCSRC_PATH, '__init__.py')
 

	
 
# list of default encoding used in safe_str/safe_bytes methods
 
DEFAULT_ENCODINGS = aslist('utf-8')
 

	
 
# path to git executable run by run_git_command function
 
GIT_EXECUTABLE_PATH = 'git'
 
# can be also --branches --tags
 
GIT_REV_FILTER = '--all'
 

	
 
BACKENDS = {
 
    'hg': 'kallithea.lib.vcs.backends.hg.MercurialRepository',
 
    'git': 'kallithea.lib.vcs.backends.git.GitRepository',
 
}
 

	
 
ARCHIVE_SPECS = {
 
    'tar': ('application/x-tar', '.tar'),
 
    'tbz2': ('application/x-bzip2', '.tar.bz2'),
 
    'tgz': ('application/x-gzip', '.tar.gz'),
 
    'zip': ('application/zip', '.zip'),
 
}
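
A small sketch (not part of this changeset) showing how the supported archive kinds can be enumerated; these are the same keys fill_archive() validates against.

from kallithea.lib.vcs.conf import settings

for kind, (mimetype, extension) in sorted(settings.ARCHIVE_SPECS.items()):
    print(kind, mimetype, extension)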
kallithea/lib/vcs/utils/paths.py
Show inline comments
 
import os
 

	
 

	
 
def abspath(*p):
    return os.path.abspath(os.path.join(*p))
 

	
 

	
 
def get_dirs_for_path(*paths):
 
    """
 
    Returns list of directories, including intermediate.
 
    """
 
    for path in paths:
 
        head = path
 
        while head:
 
            head, _tail = os.path.split(head)
 
            if head:
 
                yield head
 
            else:
 
                # We don't need to yield empty path
 
                break
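
A minimal sketch (not part of this changeset) of what the generator yields; the path is a placeholder.

assert list(get_dirs_for_path('docs/usage/setup.rst')) == ['docs/usage', 'docs']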
 

	
 

	
 
def get_dir_size(path):
 
    root_path = path
 
    size = 0
 
    for path, dirs, files in os.walk(root_path):
 
        for f in files:
 
            try:
 
                size += os.path.getsize(os.path.join(path, f))
 
            except OSError:
 
                pass
 
    return size
 

	
 

	
 
def get_user_home():
 
    """
 
    Returns home path of the user.
 
    """
 
    return os.getenv('HOME', os.getenv('USERPROFILE')) or ''
kallithea/model/repo.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.repo
 
~~~~~~~~~~~~~~~~~~~~
 

	
 
Repository model for kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jun 5, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 

	
 
"""
 

	
 
import logging
 
import os
 
import shutil
 
import traceback
 
from datetime import datetime
 

	
 
import kallithea.lib.utils2
 
from kallithea.lib import helpers as h
 
from kallithea.lib.auth import HasRepoPermissionLevel, HasUserGroupPermissionLevel
 
from kallithea.lib.caching_query import FromCache
 
from kallithea.lib.exceptions import AttachedForksError
 
from kallithea.lib.hooks import log_delete_repository
 
from kallithea.lib.utils import is_valid_repo_uri, make_ui
 
from kallithea.lib.utils2 import LazyProperty, get_current_authuser, obfuscate_url_pw, remove_prefix
 
from kallithea.lib.vcs.backends import get_backend
 
from kallithea.model.db import (
 
    Permission, RepoGroup, Repository, RepositoryField, Session, Statistics, Ui, User, UserGroup, UserGroupRepoGroupToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm, UserRepoToPerm)
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class RepoModel(object):
 

	
 
    URL_SEPARATOR = Repository.url_sep()
 

	
 
    def _create_default_perms(self, repository, private):
 
        # create default permission
 
        default = 'repository.read'
 
        def_user = User.get_default_user()
 
        for p in def_user.user_perms:
 
            if p.permission.permission_name.startswith('repository.'):
 
                default = p.permission.permission_name
 
                break
 

	
 
        default_perm = 'repository.none' if private else default
 

	
 
        repo_to_perm = UserRepoToPerm()
 
        repo_to_perm.permission = Permission.get_by_key(default_perm)
 

	
 
        repo_to_perm.repository = repository
 
        repo_to_perm.user_id = def_user.user_id
 
        Session().add(repo_to_perm)
 

	
 
        return repo_to_perm
 

	
 
    @LazyProperty
 
    def repos_path(self):
 
        """
 
        Gets the repositories root path from database
 
        """
 

	
 
        q = Ui.query().filter(Ui.ui_key == '/').one()
 
        return q.ui_value
 

	
 
    def get(self, repo_id, cache=False):
 
        repo = Repository.query() \
 
            .filter(Repository.repo_id == repo_id)
 

	
 
        if cache:
 
            repo = repo.options(FromCache("sql_cache_short",
 
                                          "get_repo_%s" % repo_id))
 
        return repo.scalar()
 

	
 
    def get_repo(self, repository):
 
        return Repository.guess_instance(repository)
 

	
 
    def get_by_repo_name(self, repo_name, cache=False):
 
        repo = Repository.query() \
 
            .filter(Repository.repo_name == repo_name)
 

	
 
        if cache:
 
            repo = repo.options(FromCache("sql_cache_short",
 
                                          "get_repo_%s" % repo_name))
 
        return repo.scalar()
 

	
 
    def get_all_user_repos(self, user):
 
        """
 
        Gets all repositories that the user has at least read access to
 

	
 
        :param user:
 
        """
 
        from kallithea.lib.auth import AuthUser
 
        auth_user = AuthUser(dbuser=User.guess_instance(user))
        repos = [repo_name
            for repo_name, perm in auth_user.permissions['repositories'].items()
            if perm in ['repository.read', 'repository.write', 'repository.admin']
            ]
 
        return Repository.query().filter(Repository.repo_name.in_(repos))
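
A hedged usage sketch (not part of this changeset); it assumes an initialized database session and a 'some_user' account with read access to at least one repository.

from kallithea.model.repo import RepoModel

readable = RepoModel().get_all_user_repos('some_user')
print([repo.repo_name for repo in readable])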
 

	
 
    @classmethod
 
    def _render_datatable(cls, tmpl, *args, **kwargs):
 
        from tg import tmpl_context as c, request, app_globals
 
        from tg.i18n import ugettext as _
 

	
 
        _tmpl_lookup = app_globals.mako_lookup
 
        template = _tmpl_lookup.get_template('data_table/_dt_elements.html')
 

	
 
        tmpl = template.get_def(tmpl)
 
        kwargs.update(dict(_=_, h=h, c=c, request=request))
 
        return tmpl.render_unicode(*args, **kwargs)
 

	
 
    def get_repos_as_dict(self, repos_list, repo_groups_list=None,
 
                          admin=False,
 
                          short_name=False):
 
        """Return repository list for use by DataTable.
 
        repos_list: list of repositories - but will be filtered for read permission.
 
        repo_groups_list: added at top of list without permission check.
 
        admin: return data for action column.
 
        """
 
        _render = self._render_datatable
 
        from tg import tmpl_context as c, request
 
        from kallithea.model.scm import ScmModel
 

	
 
        def repo_lnk(name, rtype, rstate, private, fork_of):
 
            return _render('repo_name', name, rtype, rstate, private, fork_of,
 
                           short_name=short_name)
 

	
 
        def following(repo_id, is_following):
 
            return _render('following', repo_id, is_following)
 

	
 
        def last_change(last_change):
 
            return _render("last_change", last_change)
 

	
 
        def rss_lnk(repo_name):
 
            return _render("rss", repo_name)
 

	
 
        def atom_lnk(repo_name):
 
            return _render("atom", repo_name)
 

	
 
        def last_rev(repo_name, cs_cache):
 
            return _render('revision', repo_name, cs_cache.get('revision'),
 
                           cs_cache.get('raw_id'), cs_cache.get('author'),
 
                           cs_cache.get('message'))
 

	
 
        def desc(desc):
 
            return h.urlify_text(desc, truncate=80, stylize=c.visual.stylify_metalabels)
 

	
 
        def state(repo_state):
 
            return _render("repo_state", repo_state)
 

	
 
        def repo_actions(repo_name):
 
            return _render('repo_actions', repo_name)
 

	
 
        def owner_actions(owner_id, username):
 
            return _render('user_name', owner_id, username)
 

	
 
        repos_data = []
 

	
 
        for gr in repo_groups_list or []:
 
            repos_data.append(dict(
 
                raw_name='\0' + gr.name, # sort before repositories
 
                just_name=gr.name,
 
                name=_render('group_name_html', group_name=gr.group_name, name=gr.name),
 
                desc=gr.group_description))
 

	
 
        for repo in repos_list:
 
            if not HasRepoPermissionLevel('read')(repo.repo_name, 'get_repos_as_dict check'):
 
                continue
 
            cs_cache = repo.changeset_cache
 
            row = {
 
                "raw_name": repo.repo_name,
 
                "just_name": repo.just_name,
 
                "name": repo_lnk(repo.repo_name, repo.repo_type,
 
                                 repo.repo_state, repo.private, repo.fork),
 
                "following": following(
 
                    repo.repo_id,
 
                    ScmModel().is_following_repo(repo.repo_name, request.authuser.user_id),
 
                ),
 
                "last_change_iso": repo.last_db_change.isoformat(),
 
                "last_change": last_change(repo.last_db_change),
 
                "last_changeset": last_rev(repo.repo_name, cs_cache),
 
                "last_rev_raw": cs_cache.get('revision'),
 
                "desc": desc(repo.description),
 
                "owner": h.person(repo.owner),
 
                "state": state(repo.repo_state),
 
                "rss": rss_lnk(repo.repo_name),
 
                "atom": atom_lnk(repo.repo_name),
 
            }
 
            if admin:
 
                row.update({
 
                    "action": repo_actions(repo.repo_name),
 
                    "owner": owner_actions(repo.owner_id,
 
                                           h.person(repo.owner))
 
                })
 
            repos_data.append(row)
 

	
 
        return {
 
            "sort": "name",
 
            "dir": "asc",
 
            "records": repos_data
 
        }
 

	
 
    def _get_defaults(self, repo_name):
 
        """
 
        Gets information about repository, and returns a dict for
 
        usage in forms
 

	
 
        :param repo_name:
 
        """
 

	
 
        repo_info = Repository.get_by_repo_name(repo_name)
 

	
 
        if repo_info is None:
 
            return None
 

	
 
        defaults = repo_info.get_dict()
 
        defaults['repo_name'] = repo_info.just_name
 
        defaults['repo_group'] = repo_info.group_id
 

	
 
        for strip, k in [(0, 'repo_type'), (1, 'repo_enable_downloads'),
 
                         (1, 'repo_description'),
 
                         (1, 'repo_landing_rev'), (0, 'clone_uri'),
 
                         (1, 'repo_private'), (1, 'repo_enable_statistics')]:
 
            attr = k
 
            if strip:
 
                attr = remove_prefix(k, 'repo_')
 

	
 
            val = defaults[attr]
 
            if k == 'repo_landing_rev':
 
                val = ':'.join(defaults[attr])
 
            defaults[k] = val
 
            if k == 'clone_uri':
 
                defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
 

	
 
        # fill owner
 
        if repo_info.owner:
 
            defaults.update({'owner': repo_info.owner.username})
 
        else:
 
            replacement_user = User.query().filter(User.admin ==
 
                                                   True).first().username
 
            defaults.update({'owner': replacement_user})
 

	
 
        # fill repository users
 
        for p in repo_info.repo_to_perm:
 
            defaults.update({'u_perm_%s' % p.user.username:
 
                                 p.permission.permission_name})
 

	
 
        # fill repository groups
 
        for p in repo_info.users_group_to_perm:
 
            defaults.update({'g_perm_%s' % p.users_group.users_group_name:
 
                                 p.permission.permission_name})
 

	
 
        return defaults
 

	
 
    def update(self, repo, **kwargs):
 
        try:
 
            cur_repo = Repository.guess_instance(repo)
 
            org_repo_name = cur_repo.repo_name
 
            if 'owner' in kwargs:
 
                cur_repo.owner = User.get_by_username(kwargs['owner'])
 

	
 
            if 'repo_group' in kwargs:
 
                assert kwargs['repo_group'] != '-1', kwargs # RepoForm should have converted to None
 
                cur_repo.group = RepoGroup.get(kwargs['repo_group'])
 
                cur_repo.repo_name = cur_repo.get_new_name(cur_repo.just_name)
 
            log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
 
            for k in ['repo_enable_downloads',
 
                      'repo_description',
 
                      'repo_landing_rev',
 
                      'repo_private',
 
                      'repo_enable_statistics',
 
                      ]:
 
                if k in kwargs:
 
                    setattr(cur_repo, remove_prefix(k, 'repo_'), kwargs[k])
 
            clone_uri = kwargs.get('clone_uri')
 
            if clone_uri is not None and clone_uri != cur_repo.clone_uri_hidden:
 
                # clone_uri is modified - if given a value, check it is valid
 
                if clone_uri != '':
 
                    # will raise exception on error
 
                    is_valid_repo_uri(cur_repo.repo_type, clone_uri, make_ui())
 
                cur_repo.clone_uri = clone_uri
 

	
 
            if 'repo_name' in kwargs:
 
                repo_name = kwargs['repo_name']
 
                if kallithea.lib.utils2.repo_name_slug(repo_name) != repo_name:
 
                    raise Exception('invalid repo name %s' % repo_name)
 
                cur_repo.repo_name = cur_repo.get_new_name(repo_name)
 

	
 
            # if private flag is set, reset default permission to NONE
 
            if kwargs.get('repo_private'):
 
                EMPTY_PERM = 'repository.none'
 
                RepoModel().grant_user_permission(
 
                    repo=cur_repo, user='default', perm=EMPTY_PERM
 
                )
 
                # handle extra fields
 
            for field in [k for k in kwargs if k.startswith(RepositoryField.PREFIX)]:
 
                k = RepositoryField.un_prefix_key(field)
 
                ex_field = RepositoryField.get_by_key_name(key=k, repo=cur_repo)
 
                if ex_field:
 
                    ex_field.field_value = kwargs[field]
 

	
 
            if org_repo_name != cur_repo.repo_name:
 
                # rename repository
 
                self._rename_filesystem_repo(old=org_repo_name, new=cur_repo.repo_name)
 

	
 
            return cur_repo
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def _create_repo(self, repo_name, repo_type, description, owner,
 
                     private=False, clone_uri=None, repo_group=None,
 
                     landing_rev='rev:tip', fork_of=None,
 
                     copy_fork_permissions=False, enable_statistics=False,
 
                     enable_downloads=False,
 
                     copy_group_permissions=False, state=Repository.STATE_PENDING):
 
        """
 
        Create a repository in the database with PENDING state. This should
        only be executed from create(), with the exception of importing
        existing repositories.
 

	
 
        """
 
        from kallithea.model.scm import ScmModel
 

	
 
        owner = User.guess_instance(owner)
 
        fork_of = Repository.guess_instance(fork_of)
 
        repo_group = RepoGroup.guess_instance(repo_group)
 
        try:
 
 
            # repo_name is just the name of the repository, while
            # repo_name_full is the fully qualified name, combining the
            # group path and the name
 
            repo_name_full = repo_name
 
            repo_name = repo_name.split(self.URL_SEPARATOR)[-1]
 
            if kallithea.lib.utils2.repo_name_slug(repo_name) != repo_name:
 
                raise Exception('invalid repo name %s' % repo_name)
 

	
 
            new_repo = Repository()
 
            new_repo.repo_state = state
 
 
            new_repo.repo_name = repo_name_full
 
            new_repo.repo_type = repo_type
 
            new_repo.owner = owner
 
            new_repo.group = repo_group
 
            new_repo.description = description or repo_name
 
            new_repo.private = private
 
            if clone_uri:
 
                # will raise exception on error
 
                is_valid_repo_uri(repo_type, clone_uri, make_ui())
 
            new_repo.clone_uri = clone_uri
 
            new_repo.landing_rev = landing_rev
 

	
 
            new_repo.enable_statistics = enable_statistics
 
            new_repo.enable_downloads = enable_downloads
 

	
 
            if fork_of:
 
                parent_repo = fork_of
 
                new_repo.fork = parent_repo
 

	
 
            Session().add(new_repo)
 

	
 
            if fork_of and copy_fork_permissions:
 
                repo = fork_of
 
                user_perms = UserRepoToPerm.query() \
 
                    .filter(UserRepoToPerm.repository == repo).all()
 
                group_perms = UserGroupRepoToPerm.query() \
 
                    .filter(UserGroupRepoToPerm.repository == repo).all()
 

	
 
                for perm in user_perms:
 
                    UserRepoToPerm.create(perm.user, new_repo, perm.permission)
 

	
 
                for perm in group_perms:
 
                    UserGroupRepoToPerm.create(perm.users_group, new_repo,
 
                                               perm.permission)
 

	
 
            elif repo_group and copy_group_permissions:
 

	
 
                user_perms = UserRepoGroupToPerm.query() \
 
                    .filter(UserRepoGroupToPerm.group == repo_group).all()
 

	
 
                group_perms = UserGroupRepoGroupToPerm.query() \
 
                    .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
 

	
 
                for perm in user_perms:
 
                    perm_name = perm.permission.permission_name.replace('group.', 'repository.')
 
                    perm_obj = Permission.get_by_key(perm_name)
 
                    UserRepoToPerm.create(perm.user, new_repo, perm_obj)
 

	
 
                for perm in group_perms:
 
                    perm_name = perm.permission.permission_name.replace('group.', 'repository.')
 
                    perm_obj = Permission.get_by_key(perm_name)
 
                    UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
 

	
 
            else:
 
                self._create_default_perms(new_repo, private)
 

	
 
            # now automatically start following this repository as owner
 
            ScmModel().toggle_following_repo(new_repo.repo_id, owner.user_id)
 
            # flush here so that any database errors surface now; the
            # filesystem dirs are created at the very end
 
            Session().flush()
 
            return new_repo
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
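
    # Editor's note (illustrative, not from the original changeset):
    # _create_repo only records the repository in the database in PENDING
    # state and flushes the session so constraint errors surface early; the
    # on-disk repository is created separately (see _create_filesystem_repo
    # below). A hypothetical direct call could look like:
    #
    #     new_repo = RepoModel()._create_repo(
    #         'my-repo', 'hg', description='demo repo', owner='admin')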
 

	
 
    def create(self, form_data, cur_user):
 
        """
 
        Create repository using celery tasks
 

	
 
        :param form_data:
 
        :param cur_user:
 
        """
 
        from kallithea.lib.celerylib import tasks
 
        return tasks.create_repo(form_data, cur_user)
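
    # Editor's note: create() only hands the validated form data to the
    # celery task layer; the task is presumably what ends up calling the
    # _create_repo / _create_filesystem_repo helpers in this class (an
    # assumption - tasks.create_repo is not shown in this changeset).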
 

	
 
    def _update_permissions(self, repo, perms_new=None, perms_updates=None,
 
                            check_perms=True):
 
        if not perms_new:
 
            perms_new = []
 
        if not perms_updates:
 
            perms_updates = []
 

	
 
        # update permissions
 
        for member, perm, member_type in perms_updates:
 
            if member_type == 'user':
 
                # this updates existing one
 
                self.grant_user_permission(
 
                    repo=repo, user=member, perm=perm
 
                )
 
            else:
 
                # check if we have permissions to alter this usergroup's access
 
                if not check_perms or HasUserGroupPermissionLevel('read')(member):
 
                    self.grant_user_group_permission(
 
                        repo=repo, group_name=member, perm=perm
 
                    )
 
        # set new permissions
 
        for member, perm, member_type in perms_new:
 
            if member_type == 'user':
 
                self.grant_user_permission(
 
                    repo=repo, user=member, perm=perm
 
                )
 
            else:
 
                # check if we have permissions to alter this usergroup's access
 
                if not check_perms or HasUserGroupPermissionLevel('read')(member):
 
                    self.grant_user_group_permission(
 
                        repo=repo, group_name=member, perm=perm
 
                    )
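
    # Illustrative sketch (editor's addition): perms_new and perms_updates
    # are iterables of (member, permission, member_type) tuples; member_type
    # is 'user' for user permissions, and anything else is treated as a user
    # group. The exact non-'user' marker used by the forms is not shown here,
    # so 'users_group' below is an assumption:
    #
    #     RepoModel()._update_permissions(
    #         repo='some/repo',
    #         perms_new=[('john', 'repository.write', 'user'),
    #                    ('devs', 'repository.read', 'users_group')],
    #     )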
 

	
 
    def create_fork(self, form_data, cur_user):
 
        """
 
        Simple wrapper for executing the celery task for fork creation
 

	
 
        :param form_data:
 
        :param cur_user:
 
        """
 
        from kallithea.lib.celerylib import tasks
 
        return tasks.create_repo_fork(form_data, cur_user)
 

	
 
    def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
 
        """
 
        Delete the given repository; the forks parameter defines what to do with
 
        attached forks. Throws AttachedForksError if deleted repo has attached
 
        forks
 

	
 
        :param repo:
 
        :param forks: str 'delete' or 'detach'
 
        :param fs_remove: remove (archive) repo from filesystem
 
        """
 
        if not cur_user:
 
            cur_user = getattr(get_current_authuser(), 'username', None)
 
        repo = Repository.guess_instance(repo)
 
        if repo is not None:
 
            if forks == 'detach':
 
                for r in repo.forks:
 
                    r.fork = None
 
            elif forks == 'delete':
 
                for r in repo.forks:
 
                    self.delete(r, forks='delete')
 
            elif [f for f in repo.forks]:
 
                raise AttachedForksError()
 

	
 
            old_repo_dict = repo.get_dict()
 
            try:
 
                Session().delete(repo)
 
                if fs_remove:
 
                    self._delete_filesystem_repo(repo)
 
                else:
 
                    log.debug('skipping removal from filesystem')
 
                log_delete_repository(old_repo_dict,
 
                                      deleted_by=cur_user)
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                raise
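
    # Illustrative usage sketch (editor's addition; 'some/repo' is a
    # placeholder): deleting a repository while detaching its forks and
    # archiving it on disk could look like:
    #
    #     RepoModel().delete('some/repo', forks='detach', fs_remove=True)
    #     Session().commit()
    #
    # With forks left as None, AttachedForksError is raised if the repository
    # has any forks.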
 

	
 
    def grant_user_permission(self, repo, user, perm):
 
        """
 
        Grant permission for user on given repository, or update existing one
 
        if found
 

	
 
        :param repo: Instance of Repository, repository_id, or repository name
 
        :param user: Instance of User, user_id or username
 
        :param perm: Instance of Permission, or permission_name
 
        """
 
        user = User.guess_instance(user)
 
        repo = Repository.guess_instance(repo)
 
        permission = Permission.guess_instance(perm)
 

	
 
        # check if we have that permission already
 
        obj = UserRepoToPerm.query() \
 
            .filter(UserRepoToPerm.user == user) \
 
            .filter(UserRepoToPerm.repository == repo) \
 
            .scalar()
 
        if obj is None:
 
            # create a new one
 
            obj = UserRepoToPerm()
 
            Session().add(obj)
 
        obj.repository = repo
 
        obj.user = user
 
        obj.permission = permission
 
        log.debug('Granted perm %s to %s on %s', perm, user, repo)
 
        return obj
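
    # Illustrative sketch (editor's addition): granting read access to the
    # anonymous 'default' user, mirroring the private-flag handling in
    # update() above ('repository.read' is assumed to be a valid permission
    # name alongside the 'repository.none' used there):
    #
    #     RepoModel().grant_user_permission(
    #         repo='some/repo', user='default', perm='repository.read')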
 

	
 
    def revoke_user_permission(self, repo, user):
 
        """
 
        Revoke permission for user on given repository
 

	
 
        :param repo: Instance of Repository, repository_id, or repository name
 
        :param user: Instance of User, user_id or username
 
        """
 

	
 
        user = User.guess_instance(user)
 
        repo = Repository.guess_instance(repo)
 

	
 
        obj = UserRepoToPerm.query() \
 
            .filter(UserRepoToPerm.repository == repo) \
 
            .filter(UserRepoToPerm.user == user) \
 
            .scalar()
 
        if obj is not None:
 
            Session().delete(obj)
 
            log.debug('Revoked perm from %s on %s', user, repo)
 

	
 
    def grant_user_group_permission(self, repo, group_name, perm):
 
        """
 
        Grant permission for user group on given repository, or update
 
        existing one if found
 

	
 
        :param repo: Instance of Repository, repository_id, or repository name
 
        :param group_name: Instance of UserGroup, users_group_id,
 
            or user group name
 
        :param perm: Instance of Permission, or permission_name
 
        """
 
        repo = Repository.guess_instance(repo)
 
        group_name = UserGroup.guess_instance(group_name)
 
        permission = Permission.guess_instance(perm)
 

	
 
        # check if we have that permission already
 
        obj = UserGroupRepoToPerm.query() \
 
            .filter(UserGroupRepoToPerm.users_group == group_name) \
 
            .filter(UserGroupRepoToPerm.repository == repo) \
 
            .scalar()
 

	
 
        if obj is None:
 
            # create new
 
            obj = UserGroupRepoToPerm()
 
            Session().add(obj)
 

	
 
        obj.repository = repo
 
        obj.users_group = group_name
 
        obj.permission = permission
 
        log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
 
        return obj
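
    # Editor's note: this mirrors grant_user_permission() above, but is keyed
    # on a user group (instance, id, or name), e.g. (with placeholder names):
    #
    #     RepoModel().grant_user_group_permission(
    #         repo='some/repo', group_name='devs', perm='repository.write')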
 

	
 
    def revoke_user_group_permission(self, repo, group_name):
 
        """
 
        Revoke permission for user group on given repository
 

	
 
        :param repo: Instance of Repository, repository_id, or repository name
 
        :param group_name: Instance of UserGroup, users_group_id,
 
            or user group name
 
        """
 
        repo = Repository.guess_instance(repo)
 
        group_name = UserGroup.guess_instance(group_name)
 

	
 
        obj = UserGroupRepoToPerm.query() \
 
            .filter(UserGroupRepoToPerm.repository == repo) \
 
            .filter(UserGroupRepoToPerm.users_group == group_name) \
 
            .scalar()
 
        if obj is not None:
 
            Session().delete(obj)
 
            log.debug('Revoked perm from %s on %s', group_name, repo)
 

	
 
    def delete_stats(self, repo_name):
 
        """
 
        removes stats for given repo
 

	
 
        :param repo_name:
 
        """
 
        repo = Repository.guess_instance(repo_name)
 
        try:
 
            obj = Statistics.query() \
 
                .filter(Statistics.repository == repo).scalar()
 
            if obj is not None:
 
                Session().delete(obj)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
 
                                clone_uri=None, repo_store_location=None):
 
        """
 
        Create a repository on the filesystem. The operation is group aware:
        it will create the repository inside a group and adjust the paths
        according to the group location.
 

	
 
        Note: clone_uri is low level and not validated - it might be a file system path used for validated cloning
 
        """
 
        from kallithea.lib.utils import is_valid_repo, is_valid_repo_group
 
        from kallithea.model.scm import ScmModel
 

	
 
        if '/' in repo_name:
 
            raise ValueError('repo_name must not contain groups, got `%s`' % repo_name)
 

	
 
        if isinstance(repo_group, RepoGroup):
 
            new_parent_path = os.sep.join(repo_group.full_path_splitted)
 
        else:
 
            new_parent_path = repo_group or ''
 

	
 
        if repo_store_location:
 
            _paths = [repo_store_location]
 
        else:
 
            _paths = [self.repos_path, new_parent_path, repo_name]
 
        repo_path = os.path.join(*_paths)
 

	
 
        # check if this path is not a repository
 
        if is_valid_repo(repo_path, self.repos_path):
 
            raise Exception('This path %s is already a valid repository' % repo_path)
 

	
 
        # check if this path is a group
 
        if is_valid_repo_group(repo_path, self.repos_path):
 
            raise Exception('This path %s is already a valid repository group' % repo_path)
 

	
 
        log.info('creating repo %s in %s from url: `%s`',
 
            repo_name, repo_path,
 
            obfuscate_url_pw(clone_uri))
 

	
 
        backend = get_backend(repo_type)
 

	
 
        if repo_type == 'hg':
 
            baseui = make_ui()
 
            # patch and reset the hooks section of the ui config so that no
            # hooks are run while creating (and possibly cloning) the repo
 
            for k, v in baseui.configitems('hooks'):
 
                baseui.setconfig('hooks', k, None)
 

	
 
            repo = backend(repo_path, create=True, src_url=clone_uri, baseui=baseui)
 
        elif repo_type == 'git':
 
            repo = backend(repo_path, create=True, src_url=clone_uri, bare=True)
 
            # add kallithea hook into this repo
 
            ScmModel().install_git_hooks(repo=repo)
 
        else:
 
            raise Exception('Unsupported repo_type %s, expected hg or git' % repo_type)
 

	
 
        log.debug('Created repo %s with %s backend',
 
                  repo_name, repo_type)
 
        return repo
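
    # Editor's note: for Mercurial the hooks section of the per-request ui
    # config is cleared above so that creating the repo (and any initial
    # clone from clone_uri) does not trigger Kallithea's own hooks; for Git
    # the repository is created bare and the Kallithea hooks are installed
    # explicitly afterwards via ScmModel().install_git_hooks().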
 

	
 
    def _rename_filesystem_repo(self, old, new):
 
        """
 
        renames repository on filesystem
 

	
 
        :param old: old name
 
        :param new: new name
 
        """
 
        log.info('renaming repo from %s to %s', old, new)
 

	
 
        old_path = os.path.join(self.repos_path, old)
 
        new_path = os.path.join(self.repos_path, new)
 
        if os.path.isdir(new_path):
 
            raise Exception(
 
                'Was trying to rename to already existing dir %s' % new_path
 
            )
 
        shutil.move(old_path, new_path)
 

	
 
    def _delete_filesystem_repo(self, repo):
 
        """
 
        Removes the repo from the filesystem; the removal is actually done by
        renaming the directory with an 'rm__*' prefix, which Kallithea will
        skip. It can be undeleted later by reverting the rename.
 

	
 
        :param repo: repo object
 
        """
 
        rm_path = os.path.join(self.repos_path, repo.repo_name)
 
        log.info("Removing %s", rm_path)
 

	
 
        _now = datetime.now()
 
        _ms = str(_now.microsecond).rjust(6, '0')
 
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
 
                             repo.just_name)
 
        if repo.group:
 
            args = repo.group.full_path_splitted + [_d]
 
            _d = os.path.join(*args)
 
        if os.path.exists(rm_path):
 
            shutil.move(rm_path, os.path.join(self.repos_path, _d))
 
        else:
 
            log.error("Can't find repo to delete in %r", rm_path)

Changeset was too big and was cut off...
