Changeset - d7d1e0a3850a
[Not reviewed]
Branch: default
Søren Løvborg - 2017-01-18 16:56:26
sorenl@unity3d.com
model: inline calls to BaseModel._get_repo
8 files changed with 24 insertions and 33 deletions:
0 comments (0 inline, 0 general)
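The pattern applied throughout this changeset: call sites that resolved a repository through the model layer's BaseModel._get_repo (via RepoModel) now call Repository.guess_instance on the db class directly. A minimal before/after sketch follows for orientation; the wrapper function names are hypothetical and not part of the diff, and guess_instance is assumed to accept a repository name or a primary key, matching the call sites changed below.

# before: indirect lookup through the model layer (BaseModel._get_repo)
from kallithea.model.repo import RepoModel

def resolve_repo_old(repo_name_or_id):
    # RepoModel inherits _get_repo from BaseModel
    return RepoModel()._get_repo(repo_name_or_id)

# after: resolve directly on the db class
from kallithea.model.db import Repository

def resolve_repo_new(repo_name_or_id):
    # guess_instance resolves the argument to a Repository row
    return Repository.guess_instance(repo_name_or_id)

The call sites stay one-line lookups; only the receiver changes (a RepoModel() instance becomes the Repository class).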
kallithea/controllers/pullrequests.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.pullrequests
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
pull requests controller for Kallithea, used for initializing pull requests
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: May 7, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 
import traceback
 
import formencode
 
import re
 

	
 
from pylons import request, tmpl_context as c
 
from pylons.i18n.translation import _
 
from webob.exc import HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest
 

	
 
from kallithea.config.routing import url
 
from kallithea.lib import helpers as h
 
from kallithea.lib import diffs
 
from kallithea.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator, \
 
    NotAnonymous
 
from kallithea.lib.base import BaseRepoController, render, jsonify
 
from kallithea.lib.compat import json, OrderedDict
 
from kallithea.lib.diffs import LimitedDiffContainer
 
from kallithea.lib.exceptions import UserInvalidException
 
from kallithea.lib.page import Page
 
from kallithea.lib.utils import action_logger
 
from kallithea.lib.vcs.exceptions import EmptyRepositoryError, ChangesetDoesNotExistError
 
from kallithea.lib.vcs.utils import safe_str
 
from kallithea.lib.vcs.utils.hgcompat import unionrepo
 
from kallithea.model.db import PullRequest, ChangesetStatus, ChangesetComment, \
 
-    PullRequestReviewer, User
+    PullRequestReviewer, Repository, User
 
from kallithea.model.pull_request import PullRequestModel
 
from kallithea.model.meta import Session
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.comment import ChangesetCommentsModel
 
from kallithea.model.changeset_status import ChangesetStatusModel
 
from kallithea.model.forms import PullRequestForm, PullRequestPostForm
 
from kallithea.lib.utils2 import safe_int
 
from kallithea.controllers.changeset import _ignorews_url, _context_url, \
 
    create_comment
 
from kallithea.controllers.compare import CompareController
 
from kallithea.lib.graphmod import graph_data
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class PullrequestsController(BaseRepoController):
 

	
 
    def _get_repo_refs(self, repo, rev=None, branch=None, branch_rev=None):
 
        """return a structure with repo's interesting changesets, suitable for
 
        the selectors in pullrequest.html
 

	
 
        rev: a revision that must be in the list somehow and selected by default
 
        branch: a branch that must be in the list and selected by default - even if closed
 
        branch_rev: a revision whose peers should be preferred and available."""
 
        # list named branches that have been merged into this named branch - it should probably be merged back into them
 
        peers = []
 

	
 
        if rev:
 
            rev = safe_str(rev)
 

	
 
        if branch:
 
            branch = safe_str(branch)
 

	
 
        if branch_rev:
 
            branch_rev = safe_str(branch_rev)
 
            # a revset not restricting to merge() would be better
 
            # (especially because it would get the branch point)
 
            # ... but is currently too expensive
 
            # including branches of children could be nice too
 
            peerbranches = set()
 
            for i in repo._repo.revs(
 
                "sort(parents(branch(id(%s)) and merge()) - branch(id(%s)), -rev)",
 
                branch_rev, branch_rev):
 
                abranch = repo.get_changeset(i).branch
 
                if abranch not in peerbranches:
 
                    n = 'branch:%s:%s' % (abranch, repo.get_changeset(abranch).raw_id)
 
                    peers.append((n, abranch))
 
                    peerbranches.add(abranch)
 

	
 
        selected = None
 
        tiprev = repo.tags.get('tip')
 
        tipbranch = None
 

	
 
        branches = []
 
        for abranch, branchrev in repo.branches.iteritems():
 
            n = 'branch:%s:%s' % (abranch, branchrev)
 
            desc = abranch
 
            if branchrev == tiprev:
 
                tipbranch = abranch
 
                desc = '%s (current tip)' % desc
 
            branches.append((n, desc))
 
            if rev == branchrev:
 
                selected = n
 
            if branch == abranch:
 
                if not rev:
 
                    selected = n
 
                branch = None
 
        if branch:  # branch not in list - it is probably closed
 
            branchrev = repo.closed_branches.get(branch)
 
            if branchrev:
 
                n = 'branch:%s:%s' % (branch, branchrev)
 
                branches.append((n, _('%s (closed)') % branch))
 
                selected = n
 
                branch = None
 
            if branch:
 
                log.debug('branch %r not found in %s', branch, repo)
 

	
 
        bookmarks = []
 
        for bookmark, bookmarkrev in repo.bookmarks.iteritems():
 
            n = 'book:%s:%s' % (bookmark, bookmarkrev)
 
            bookmarks.append((n, bookmark))
 
            if rev == bookmarkrev:
 
                selected = n
 

	
 
        tags = []
 
        for tag, tagrev in repo.tags.iteritems():
 
            if tag == 'tip':
 
                continue
 
            n = 'tag:%s:%s' % (tag, tagrev)
 
            tags.append((n, tag))
 
            # note: even if rev == tagrev, don't select the static tag - it must be chosen explicitly
 

	
 
        # prio 1: rev was selected as existing entry above
 

	
 
        # prio 2: create special entry for rev; rev _must_ be used
 
        specials = []
 
        if rev and selected is None:
 
            selected = 'rev:%s:%s' % (rev, rev)
 
            specials = [(selected, '%s: %s' % (_("Changeset"), rev[:12]))]
 

	
 
        # prio 3: most recent peer branch
 
        if peers and not selected:
 
            selected = peers[0][0]
 

	
 
        # prio 4: tip revision
 
        if not selected:
 
            if h.is_hg(repo):
 
                if tipbranch:
 
                    selected = 'branch:%s:%s' % (tipbranch, tiprev)
 
                else:
 
                    selected = 'tag:null:' + repo.EMPTY_CHANGESET
 
                    tags.append((selected, 'null'))
 
            else:
 
                if 'master' in repo.branches:
 
                    selected = 'branch:master:%s' % repo.branches['master']
 
                else:
 
                    k, v = repo.branches.items()[0]
 
                    selected = 'branch:%s:%s' % (k, v)
 

	
 
        groups = [(specials, _("Special")),
 
                  (peers, _("Peer branches")),
 
                  (bookmarks, _("Bookmarks")),
 
                  (branches, _("Branches")),
 
                  (tags, _("Tags")),
 
                  ]
 
        return [g for g in groups if g[0]], selected
 

	
 
    def _get_is_allowed_change_status(self, pull_request):
 
        if pull_request.is_closed():
 
            return False
 

	
 
        owner = request.authuser.user_id == pull_request.owner_id
 
        reviewer = PullRequestReviewer.query() \
 
            .filter(PullRequestReviewer.pull_request == pull_request) \
 
            .filter(PullRequestReviewer.user_id == request.authuser.user_id) \
 
            .count() != 0
 

	
 
        return request.authuser.admin or owner or reviewer
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def show_all(self, repo_name):
 
        c.from_ = request.GET.get('from_') or ''
 
        c.closed = request.GET.get('closed') or ''
 
        p = safe_int(request.GET.get('page'), 1)
 

	
 
        q = PullRequest.query(include_closed=c.closed, sorted=True)
 
        if c.from_:
 
            q = q.filter_by(org_repo=c.db_repo)
 
        else:
 
            q = q.filter_by(other_repo=c.db_repo)
 
        c.pull_requests = q.all()
 

	
 
        c.pullrequests_pager = Page(c.pull_requests, page=p, items_per_page=100)
 

	
 
        return render('/pullrequests/pullrequest_show_all.html')
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    def show_my(self):
 
        c.closed = request.GET.get('closed') or ''
 

	
 
        c.my_pull_requests = PullRequest.query(
 
            include_closed=c.closed,
 
            sorted=True,
 
        ).filter_by(owner_id=request.authuser.user_id).all()
 

	
 
        c.participate_in_pull_requests = []
 
        c.participate_in_pull_requests_todo = []
 
        done_status = set([ChangesetStatus.STATUS_APPROVED, ChangesetStatus.STATUS_REJECTED])
 
        for pr in PullRequest.query(
 
            include_closed=c.closed,
 
            reviewer_id=request.authuser.user_id,
 
            sorted=True,
 
        ):
 
            status = pr.user_review_status(request.authuser.user_id) # very inefficient!!!
 
            if status in done_status:
 
                c.participate_in_pull_requests.append(pr)
 
            else:
 
                c.participate_in_pull_requests_todo.append(pr)
 

	
 
        return render('/pullrequests/pullrequest_show_my.html')
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def index(self):
 
        org_repo = c.db_repo
 
        org_scm_instance = org_repo.scm_instance
 
        try:
 
            org_scm_instance.get_changeset()
 
        except EmptyRepositoryError as e:
 
            h.flash(h.literal(_('There are no changesets yet')),
 
                    category='warning')
 
            raise HTTPFound(location=url('summary_home', repo_name=org_repo.repo_name))
 

	
 
        org_rev = request.GET.get('rev_end')
 
        # rev_start is not directly useful - its parent could however be used
 
        # as default for other and thus give a simple compare view
 
        rev_start = request.GET.get('rev_start')
 
        other_rev = None
 
        if rev_start:
 
            starters = org_repo.get_changeset(rev_start).parents
 
            if starters:
 
                other_rev = starters[0].raw_id
 
            else:
 
                other_rev = org_repo.scm_instance.EMPTY_CHANGESET
 
        branch = request.GET.get('branch')
 

	
 
        c.cs_repos = [(org_repo.repo_name, org_repo.repo_name)]
 
        c.default_cs_repo = org_repo.repo_name
 
        c.cs_refs, c.default_cs_ref = self._get_repo_refs(org_scm_instance, rev=org_rev, branch=branch)
 

	
 
        default_cs_ref_type, default_cs_branch, default_cs_rev = c.default_cs_ref.split(':')
 
        if default_cs_ref_type != 'branch':
 
            default_cs_branch = org_repo.get_changeset(default_cs_rev).branch
 

	
 
        # add org repo to other so we can open pull request against peer branches on itself
 
        c.a_repos = [(org_repo.repo_name, '%s (self)' % org_repo.repo_name)]
 

	
 
        if org_repo.parent:
 
            # add parent of this fork also and select it.
 
            # use the same branch on destination as on source, if available.
 
            c.a_repos.append((org_repo.parent.repo_name, '%s (parent)' % org_repo.parent.repo_name))
 
            c.a_repo = org_repo.parent
 
            c.a_refs, c.default_a_ref = self._get_repo_refs(
 
                    org_repo.parent.scm_instance, branch=default_cs_branch, rev=other_rev)
 

	
 
        else:
 
            c.a_repo = org_repo
 
            c.a_refs, c.default_a_ref = self._get_repo_refs(org_scm_instance, rev=other_rev)
 

	
 
        # gather forks and add to this list ... even though it is rare to
 
        # request forks to pull from their parent
 
        for fork in org_repo.forks:
 
            c.a_repos.append((fork.repo_name, fork.repo_name))
 

	
 
        return render('/pullrequests/pullrequest.html')
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def repo_info(self, repo_name):
 
        repo = c.db_repo
 
        refs, selected_ref = self._get_repo_refs(repo.scm_instance)
 
        return {
 
            'description': repo.description.split('\n', 1)[0],
 
            'selected_ref': selected_ref,
 
            'refs': refs,
 
            }
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def create(self, repo_name):
 
        repo = c.db_repo
 
        try:
 
            _form = PullRequestForm(repo.repo_id)().to_python(request.POST)
 
        except formencode.Invalid as errors:
 
            log.error(traceback.format_exc())
 
            log.error(str(errors))
 
            msg = _('Error creating pull request: %s') % errors.msg
 
            h.flash(msg, 'error')
 
            raise HTTPBadRequest
 

	
 
        # heads up: org and other might seem backward here ...
 
        org_repo_name = _form['org_repo']
 
        org_ref = _form['org_ref'] # will have merge_rev as rev but symbolic name
 
-        org_repo = RepoModel()._get_repo(org_repo_name)
+        org_repo = Repository.guess_instance(org_repo_name)
 
        (org_ref_type,
 
         org_ref_name,
 
         org_rev) = org_ref.split(':')
 
        org_display = h.short_ref(org_ref_type, org_ref_name)
 
        if org_ref_type == 'rev':
 
            cs = org_repo.scm_instance.get_changeset(org_rev)
 
            org_ref = 'branch:%s:%s' % (cs.branch, cs.raw_id)
 

	
 
        other_repo_name = _form['other_repo']
 
        other_ref = _form['other_ref'] # will have symbolic name and head revision
 
-        other_repo = RepoModel()._get_repo(other_repo_name)
+        other_repo = Repository.guess_instance(other_repo_name)
 
        (other_ref_type,
 
         other_ref_name,
 
         other_rev) = other_ref.split(':')
 
        if other_ref_type == 'rev':
 
            cs = other_repo.scm_instance.get_changeset(other_rev)
 
            other_ref_name = cs.raw_id[:12]
 
            other_ref = '%s:%s:%s' % (other_ref_type, other_ref_name, cs.raw_id)
 
        other_display = h.short_ref(other_ref_type, other_ref_name)
 

	
 
        cs_ranges, _cs_ranges_not, ancestor_revs = \
 
            CompareController._get_changesets(org_repo.scm_instance.alias,
 
                                              other_repo.scm_instance, other_rev, # org and other "swapped"
 
                                              org_repo.scm_instance, org_rev,
 
                                              )
 
        ancestor_rev = msg = None
 
        if not cs_ranges:
 
            msg = _('Cannot create empty pull request')
 
        elif not ancestor_revs:
 
            ancestor_rev = org_repo.scm_instance.EMPTY_CHANGESET
 
        elif len(ancestor_revs) == 1:
 
            ancestor_rev = ancestor_revs[0]
 
        else:
 
            msg = _('Cannot create pull request - criss cross merge detected, please merge a later %s revision to %s'
 
                    ) % (other_ref_name, org_ref_name)
 
        if ancestor_rev is None:
 
            h.flash(msg, category='error')
 
            log.error(msg)
 
            raise HTTPNotFound
 

	
 
        revisions = [cs_.raw_id for cs_ in cs_ranges]
 

	
 
        # hack: ancestor_rev is not an other_rev but we want to show the
 
        # requested destination and have the exact ancestor
 
        other_ref = '%s:%s:%s' % (other_ref_type, other_ref_name, ancestor_rev)
 

	
 
        reviewer_ids = []
 

	
 
        title = _form['pullrequest_title']
 
        if not title:
 
            if org_repo_name == other_repo_name:
 
                title = '%s to %s' % (org_display, other_display)
 
            else:
 
                title = '%s#%s to %s#%s' % (org_repo_name, org_display,
 
                                            other_repo_name, other_display)
 
        description = _form['pullrequest_desc'].strip() or _('No description')
 
        try:
 
            created_by = User.get(request.authuser.user_id)
 
            pull_request = PullRequestModel().create(
 
                created_by, org_repo, org_ref, other_repo, other_ref, revisions,
 
                title, description, reviewer_ids)
 
            Session().commit()
 
            h.flash(_('Successfully opened new pull request'),
 
                    category='success')
 
        except UserInvalidException as u:
 
            h.flash(_('Invalid reviewer "%s" specified') % u, category='error')
 
            raise HTTPBadRequest()
 
        except Exception:
 
            h.flash(_('Error occurred while creating pull request'),
 
                    category='error')
 
            log.error(traceback.format_exc())
 
            raise HTTPFound(location=url('pullrequest_home', repo_name=repo_name))
 

	
 
        raise HTTPFound(location=pull_request.url())
 

	
 
    def create_new_iteration(self, old_pull_request, new_rev, title, description, reviewer_ids):
 
        org_repo = old_pull_request.org_repo
 
        org_ref_type, org_ref_name, org_rev = old_pull_request.org_ref.split(':')
 
        new_org_rev = self._get_ref_rev(org_repo, 'rev', new_rev)
 

	
 
        other_repo = old_pull_request.other_repo
 
        other_ref_type, other_ref_name, other_rev = old_pull_request.other_ref.split(':') # other_rev is ancestor
 
        #assert other_ref_type == 'branch', other_ref_type # TODO: what if not?
 
        new_other_rev = self._get_ref_rev(other_repo, other_ref_type, other_ref_name)
 

	
 
        cs_ranges, _cs_ranges_not, ancestor_revs = CompareController._get_changesets(org_repo.scm_instance.alias,
 
            other_repo.scm_instance, new_other_rev, # org and other "swapped"
 
            org_repo.scm_instance, new_org_rev)
 
        ancestor_rev = msg = None
 
        if not cs_ranges:
 
            msg = _('Cannot create empty pull request update') # cannot happen!
 
        elif not ancestor_revs:
 
            msg = _('Cannot create pull request update - no common ancestor found') # cannot happen
 
        elif len(ancestor_revs) == 1:
 
            ancestor_rev = ancestor_revs[0]
 
        else:
 
            msg = _('Cannot create pull request update - criss cross merge detected, please merge a later %s revision to %s'
 
                    ) % (other_ref_name, org_ref_name)
 
        if ancestor_rev is None:
 
            h.flash(msg, category='error')
 
            log.error(msg)
 
            raise HTTPNotFound
 

	
 
        old_revisions = set(old_pull_request.revisions)
 
        revisions = [cs.raw_id for cs in cs_ranges]
 
        new_revisions = [r for r in revisions if r not in old_revisions]
 
        lost = old_revisions.difference(revisions)
 

	
 
        infos = ['This is a new iteration of %s "%s".' %
 
                 (h.canonical_url('pullrequest_show', repo_name=old_pull_request.other_repo.repo_name,
 
                      pull_request_id=old_pull_request.pull_request_id),
 
                  old_pull_request.title)]
 

	
 
        if lost:
 
            infos.append(_('Missing changesets since the previous iteration:'))
 
            for r in old_pull_request.revisions:
 
                if r in lost:
 
                    rev_desc = org_repo.get_changeset(r).message.split('\n')[0]
 
                    infos.append('  %s %s' % (h.short_id(r), rev_desc))
 

	
 
        if new_revisions:
 
            infos.append(_('New changesets on %s %s since the previous iteration:') % (org_ref_type, org_ref_name))
 
            for r in reversed(revisions):
 
                if r in new_revisions:
 
                    rev_desc = org_repo.get_changeset(r).message.split('\n')[0]
 
                    infos.append('  %s %s' % (h.short_id(r), h.shorter(rev_desc, 80)))
 

	
 
            if ancestor_rev == other_rev:
 
                infos.append(_("Ancestor didn't change - diff since previous iteration:"))
 
                infos.append(h.canonical_url('compare_url',
 
                                 repo_name=org_repo.repo_name, # other_repo is always same as repo_name
 
                                 org_ref_type='rev', org_ref_name=h.short_id(org_rev), # use old org_rev as base
 
                                 other_ref_type='rev', other_ref_name=h.short_id(new_org_rev),
 
                                 )) # note: linear diff, merge or not doesn't matter
 
            else:
 
                infos.append(_('This iteration is based on another %s revision and there is no simple diff.') % other_ref_name)
 
        else:
 
            infos.append(_('No changes found on %s %s since previous iteration.') % (org_ref_type, org_ref_name))
            # TODO: fail?
 

	
 
        # hack: ancestor_rev is not an other_ref but we want to show the
 
        # requested destination and have the exact ancestor
 
        new_other_ref = '%s:%s:%s' % (other_ref_type, other_ref_name, ancestor_rev)
 
        new_org_ref = '%s:%s:%s' % (org_ref_type, org_ref_name, new_org_rev)
 

	
 
        try:
 
            title, old_v = re.match(r'(.*)\(v(\d+)\)\s*$', title).groups()
 
            v = int(old_v) + 1
 
        except (AttributeError, ValueError):
 
            v = 2
 
        title = '%s (v%s)' % (title.strip(), v)
 

	
 
        # using a mail-like separator, insert new iteration info in description with latest first
 
        descriptions = description.replace('\r\n', '\n').split('\n-- \n', 1)
 
        description = descriptions[0].strip() + '\n\n-- \n' + '\n'.join(infos)
 
        if len(descriptions) > 1:
 
            description += '\n\n' + descriptions[1].strip()
 

	
 
        try:
 
            created_by = User.get(request.authuser.user_id)
 
            pull_request = PullRequestModel().create(
 
                created_by, org_repo, new_org_ref, other_repo, new_other_ref, revisions,
 
                title, description, reviewer_ids)
 
        except UserInvalidException as u:
 
            h.flash(_('Invalid reviewer "%s" specified') % u, category='error')
 
            raise HTTPBadRequest()
 
        except Exception:
 
            h.flash(_('Error occurred while creating pull request'),
 
                    category='error')
 
            log.error(traceback.format_exc())
 
            raise HTTPFound(location=old_pull_request.url())
 

	
 
        ChangesetCommentsModel().create(
 
            text=_('Closed, next iteration: %s .') % pull_request.url(canonical=True),
 
            repo=old_pull_request.other_repo_id,
 
            author=request.authuser.user_id,
 
            pull_request=old_pull_request.pull_request_id,
 
            closing_pr=True)
 
        PullRequestModel().close_pull_request(old_pull_request.pull_request_id)
 

	
 
        Session().commit()
 
        h.flash(_('New pull request iteration created'),
 
                category='success')
 

	
 
        raise HTTPFound(location=pull_request.url())
 

	
 
    # pullrequest_post for PR editing
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def post(self, repo_name, pull_request_id):
 
        pull_request = PullRequest.get_or_404(pull_request_id)
 
        if pull_request.is_closed():
 
            raise HTTPForbidden()
 
        assert pull_request.other_repo.repo_name == repo_name
 
        #only owner or admin can update it
 
        owner = pull_request.owner_id == request.authuser.user_id
 
        repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name)
 
        if not (h.HasPermissionAny('hg.admin')() or repo_admin or owner):
 
            raise HTTPForbidden()
 

	
 
        _form = PullRequestPostForm()().to_python(request.POST)
 
        reviewer_ids = set(int(s) for s in _form['review_members'])
 

	
 
        org_reviewer_ids = set(int(s) for s in _form['org_review_members'])
 
        current_reviewer_ids = set(prr.user_id for prr in pull_request.reviewers)
 
        other_added = [User.get(u) for u in current_reviewer_ids - org_reviewer_ids]
 
        other_removed = [User.get(u) for u in org_reviewer_ids - current_reviewer_ids]
 
        if other_added:
 
            h.flash(_('Meanwhile, the following reviewers have been added: %s') %
 
                    (', '.join(u.username for u in other_added)),
 
                    category='warning')
 
        if other_removed:
 
            h.flash(_('Meanwhile, the following reviewers have been removed: %s') %
 
                    (', '.join(u.username for u in other_removed)),
 
                    category='warning')
 

	
 
        if _form['updaterev']:
 
            return self.create_new_iteration(pull_request,
 
                                      _form['updaterev'],
 
                                      _form['pullrequest_title'],
 
                                      _form['pullrequest_desc'],
 
                                      reviewer_ids)
 

	
 
        old_description = pull_request.description
 
        pull_request.title = _form['pullrequest_title']
 
        pull_request.description = _form['pullrequest_desc'].strip() or _('No description')
 
        pull_request.owner = User.get_by_username(_form['owner'])
 
        user = User.get(request.authuser.user_id)
 
        add_reviewer_ids = reviewer_ids - org_reviewer_ids - current_reviewer_ids
 
        remove_reviewer_ids = (org_reviewer_ids - reviewer_ids) & current_reviewer_ids
 
        try:
 
            PullRequestModel().mention_from_description(user, pull_request, old_description)
 
            PullRequestModel().add_reviewers(user, pull_request, add_reviewer_ids)
 
            PullRequestModel().remove_reviewers(user, pull_request, remove_reviewer_ids)
 
        except UserInvalidException as u:
 
            h.flash(_('Invalid reviewer "%s" specified') % u, category='error')
 
            raise HTTPBadRequest()
 

	
 
        Session().commit()
 
        h.flash(_('Pull request updated'), category='success')
 

	
 
        raise HTTPFound(location=pull_request.url())
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def delete(self, repo_name, pull_request_id):
 
        pull_request = PullRequest.get_or_404(pull_request_id)
 
        #only owner can delete it !
 
        if pull_request.owner_id == request.authuser.user_id:
 
            PullRequestModel().delete(pull_request)
 
            Session().commit()
 
            h.flash(_('Successfully deleted pull request'),
 
                    category='success')
 
            raise HTTPFound(location=url('my_pullrequests'))
 
        raise HTTPForbidden()
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def show(self, repo_name, pull_request_id, extra=None):
 
        repo_model = RepoModel()
 
        c.users_array = repo_model.get_users_js()
 
        c.user_groups_array = repo_model.get_user_groups_js()
 
        c.pull_request = PullRequest.get_or_404(pull_request_id)
 
        c.allowed_to_change_status = self._get_is_allowed_change_status(c.pull_request)
 
        cc_model = ChangesetCommentsModel()
 
        cs_model = ChangesetStatusModel()
 

	
 
        # the repo_name in the URL is the repo the pull request was opened
        # against, i.e. it must match other_repo
 
        if repo_name != c.pull_request.other_repo.repo_name:
 
            raise HTTPNotFound
 

	
 
        # load compare data into template context
 
        c.cs_repo = c.pull_request.org_repo
 
        (c.cs_ref_type,
 
         c.cs_ref_name,
 
         c.cs_rev) = c.pull_request.org_ref.split(':')
 

	
 
        c.a_repo = c.pull_request.other_repo
 
        (c.a_ref_type,
 
         c.a_ref_name,
 
         c.a_rev) = c.pull_request.other_ref.split(':') # a_rev is ancestor
 

	
 
        org_scm_instance = c.cs_repo.scm_instance # property with expensive cache invalidation check!!!
 
        try:
 
            c.cs_ranges = [org_scm_instance.get_changeset(x)
 
                           for x in c.pull_request.revisions]
 
        except ChangesetDoesNotExistError:
 
            c.cs_ranges = []
 
            h.flash(_('Revision %s not found in %s') % (x, c.cs_repo.repo_name),
 
                'error')
 
        c.cs_ranges_org = None # not stored and not important and moving target - could be calculated ...
 
        revs = [ctx.revision for ctx in reversed(c.cs_ranges)]
 
        c.jsdata = json.dumps(graph_data(org_scm_instance, revs))
 

	
 
        c.is_range = False
 
        try:
 
            if c.a_ref_type == 'rev': # this looks like a free range where target is ancestor
 
                cs_a = org_scm_instance.get_changeset(c.a_rev)
 
                root_parents = c.cs_ranges[0].parents
 
                c.is_range = cs_a in root_parents
 
                #c.merge_root = len(root_parents) > 1 # a range starting with a merge might deserve a warning
 
        except ChangesetDoesNotExistError: # probably because c.a_rev not found
 
            pass
 
        except IndexError: # probably because c.cs_ranges is empty, probably because revisions are missing
 
            pass
 

	
 
        avail_revs = set()
 
        avail_show = []
 
        c.cs_branch_name = c.cs_ref_name
 
        c.a_branch_name = None
 
        other_scm_instance = c.a_repo.scm_instance
 
        c.update_msg = ""
 
        c.update_msg_other = ""
 
        try:
 
            if not c.cs_ranges:
 
                c.update_msg = _('Error: changesets not found when displaying pull request from %s.') % c.cs_rev
 
            elif org_scm_instance.alias == 'hg' and c.a_ref_name != 'ancestor':
 
                if c.cs_ref_type != 'branch':
 
                    c.cs_branch_name = org_scm_instance.get_changeset(c.cs_ref_name).branch # use ref_type ?
 
                c.a_branch_name = c.a_ref_name
 
                if c.a_ref_type != 'branch':
 
                    try:
 
                        c.a_branch_name = other_scm_instance.get_changeset(c.a_ref_name).branch # use ref_type ?
 
                    except EmptyRepositoryError:
 
                        c.a_branch_name = 'null' # not a branch name ... but close enough
 
                # candidates: descendants of old head that are on the right branch
 
                #             and are not the old head itself ...
 
                #             and nothing at all if old head is a descendant of target ref name
 
                if not c.is_range and other_scm_instance._repo.revs('present(%s)::&%s', c.cs_ranges[-1].raw_id, c.a_branch_name):
 
                    c.update_msg = _('This pull request has already been merged to %s.') % c.a_branch_name
 
                elif c.pull_request.is_closed():
 
                    c.update_msg = _('This pull request has been closed and can not be updated.')
 
                else: # look for descendants of PR head on source branch in org repo
 
                    avail_revs = org_scm_instance._repo.revs('%s:: & branch(%s)',
 
                                                             revs[0], c.cs_branch_name)
 
                    if len(avail_revs) > 1: # more than just revs[0]
 
                        # also show changesets that are not descendants but would be merged in
 
                        targethead = other_scm_instance.get_changeset(c.a_branch_name).raw_id
 
                        if org_scm_instance.path != other_scm_instance.path:
 
                            # Note: org_scm_instance.path must come first so all
 
                            # valid revision numbers are 100% org_scm compatible
 
                            # - both for avail_revs and for revset results
 
                            hgrepo = unionrepo.unionrepository(org_scm_instance.baseui,
 
                                                               org_scm_instance.path,
 
                                                               other_scm_instance.path)
 
                        else:
 
                            hgrepo = org_scm_instance._repo
 
                        show = set(hgrepo.revs('::%ld & !::parents(%s) & !::%s',
 
                                               avail_revs, revs[0], targethead))
 
                        c.update_msg = _('The following additional changes are available on %s:') % c.cs_branch_name
 
                    else:
 
                        show = set()
 
                        avail_revs = set() # drop revs[0]
 
                        c.update_msg = _('No additional changesets found for iterating on this pull request.')
 

	
 
                    # TODO: handle branch heads that are not tip-most
 
                    brevs = org_scm_instance._repo.revs('%s - %ld - %s', c.cs_branch_name, avail_revs, revs[0])
 
                    if brevs:
 
                        # also show changesets that are on the branch but are neither ancestors nor descendants
 
                        show.update(org_scm_instance._repo.revs('::%ld - ::%ld - ::%s', brevs, avail_revs, c.a_branch_name))
 
                        show.add(revs[0]) # make sure graph shows this so we can see how they relate
 
                        c.update_msg_other = _('Note: Branch %s has another head: %s.') % (c.cs_branch_name,
 
                            h.short_id(org_scm_instance.get_changeset((max(brevs))).raw_id))
 

	
 
                    avail_show = sorted(show, reverse=True)
 

	
 
            elif org_scm_instance.alias == 'git':
 
                c.cs_repo.scm_instance.get_changeset(c.cs_rev) # check it exists - raise ChangesetDoesNotExistError if not
 
                c.update_msg = _("Git pull requests don't support iterating yet.")
 
        except ChangesetDoesNotExistError:
 
            c.update_msg = _('Error: some changesets not found when displaying pull request from %s.') % c.cs_rev
 

	
 
        c.avail_revs = avail_revs
 
        c.avail_cs = [org_scm_instance.get_changeset(r) for r in avail_show]
 
        c.avail_jsdata = json.dumps(graph_data(org_scm_instance, avail_show))
 

	
 
        raw_ids = [x.raw_id for x in c.cs_ranges]
 
        c.cs_comments = c.cs_repo.get_comments(raw_ids)
 
        c.statuses = c.cs_repo.statuses(raw_ids)
 

	
 
        ignore_whitespace = request.GET.get('ignorews') == '1'
 
        line_context = safe_int(request.GET.get('context'), 3)
 
        c.ignorews_url = _ignorews_url
 
        c.context_url = _context_url
 
        c.fulldiff = request.GET.get('fulldiff')
 
        diff_limit = self.cut_off_limit if not c.fulldiff else None
 

	
 
        # we swap org/other ref since we run a simple diff on one repo
 
        log.debug('running diff between %s and %s in %s',
 
                  c.a_rev, c.cs_rev, org_scm_instance.path)
 
        try:
 
            txtdiff = org_scm_instance.get_diff(rev1=safe_str(c.a_rev), rev2=safe_str(c.cs_rev),
 
                                                ignore_whitespace=ignore_whitespace,
 
                                                context=line_context)
 
        except ChangesetDoesNotExistError:
 
            txtdiff =  _("The diff can't be shown - the PR revisions could not be found.")
 
        diff_processor = diffs.DiffProcessor(txtdiff or '', format='gitdiff',
 
                                             diff_limit=diff_limit)
 
        _parsed = diff_processor.prepare()
 

	
 
        c.limited_diff = False
 
        if isinstance(_parsed, LimitedDiffContainer):
 
            c.limited_diff = True
 

	
 
        c.file_diff_data = []
 
        c.lines_added = 0
 
        c.lines_deleted = 0
 

	
 
        for f in _parsed:
 
            st = f['stats']
 
            c.lines_added += st['added']
 
            c.lines_deleted += st['deleted']
 
            filename = f['filename']
 
            fid = h.FID('', filename)
 
            diff = diff_processor.as_html(enable_comments=True,
 
                                          parsed_lines=[f])
 
            c.file_diff_data.append((fid, None, f['operation'], f['old_filename'], filename, diff, st))
 

	
 
        # inline comments
 
        c.inline_cnt = 0
 
        c.inline_comments = cc_model.get_inline_comments(
 
                                c.db_repo.repo_id,
 
                                pull_request=pull_request_id)
 
        # count inline comments
 
        for __, lines in c.inline_comments:
 
            for comments in lines.values():
 
                c.inline_cnt += len(comments)
 
        # comments
 
        c.comments = cc_model.get_comments(c.db_repo.repo_id,
 
                                           pull_request=pull_request_id)
 

	
 
        # (badly named) pull-request status calculation based on reviewer votes
 
        (c.pull_request_reviewers,
 
         c.pull_request_pending_reviewers,
 
         c.current_voting_result,
 
         ) = cs_model.calculate_pull_request_result(c.pull_request)
 
        c.changeset_statuses = ChangesetStatus.STATUSES
 

	
 
        c.as_form = False
 
        c.ancestors = None # [c.a_rev] ... but that is shown in an other way
 
        return render('/pullrequests/pullrequest_show.html')
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def comment(self, repo_name, pull_request_id):
 
        pull_request = PullRequest.get_or_404(pull_request_id)
 

	
 
        status = request.POST.get('changeset_status')
 
        close_pr = request.POST.get('save_close')
 
        delete = request.POST.get('save_delete')
 
        f_path = request.POST.get('f_path')
 
        line_no = request.POST.get('line')
 

	
 
        if (status or close_pr or delete) and (f_path or line_no):
 
            # status votes and closing is only possible in general comments
 
            raise HTTPBadRequest()
 

	
 
        allowed_to_change_status = self._get_is_allowed_change_status(pull_request)
 
        if not allowed_to_change_status:
 
            if status or close_pr:
 
                h.flash(_('No permission to change pull request status'), 'error')
 
                raise HTTPForbidden()
 

	
 
        if delete == "delete":
 
            if (pull_request.owner_id == request.authuser.user_id or
 
                h.HasPermissionAny('hg.admin')() or
 
                h.HasRepoPermissionAny('repository.admin')(pull_request.org_repo.repo_name) or
 
                h.HasRepoPermissionAny('repository.admin')(pull_request.other_repo.repo_name)
 
                ) and not pull_request.is_closed():
 
                PullRequestModel().delete(pull_request)
 
                Session().commit()
 
                h.flash(_('Successfully deleted pull request %s') % pull_request_id,
 
                        category='success')
 
                return {
 
                   'location': url('my_pullrequests'), # or repo pr list?
 
                }
 
                raise HTTPFound(location=url('my_pullrequests')) # or repo pr list?
 
            raise HTTPForbidden()
 

	
 
        text = request.POST.get('text', '').strip()
 

	
 
        comment = create_comment(
 
            text,
 
            status,
 
            pull_request_id=pull_request_id,
 
            f_path=f_path,
 
            line_no=line_no,
 
            closing_pr=close_pr,
 
        )
 

	
 
        action_logger(request.authuser,
 
                      'user_commented_pull_request:%s' % pull_request_id,
 
                      c.db_repo, request.ip_addr, self.sa)
 

	
 
        if status:
 
            ChangesetStatusModel().set_status(
 
                c.db_repo.repo_id,
 
                status,
 
                request.authuser.user_id,
 
                comment,
 
                pull_request=pull_request_id
 
            )
 

	
 
        if close_pr:
 
            PullRequestModel().close_pull_request(pull_request_id)
 
            action_logger(request.authuser,
 
                          'user_closed_pull_request:%s' % pull_request_id,
 
                          c.db_repo, request.ip_addr, self.sa)
 

	
 
        Session().commit()
 

	
 
        if not request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            raise HTTPFound(location=pull_request.url())
 

	
 
        data = {
 
           'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
 
        }
 
        if comment is not None:
 
            c.comment = comment
 
            data.update(comment.get_dict())
 
            data.update({'rendered_text':
 
                         render('changeset/changeset_comment_block.html')})
 

	
 
        return data
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def delete_comment(self, repo_name, comment_id):
 
        co = ChangesetComment.get(comment_id)
 
        if co.pull_request.is_closed():
 
            #don't allow deleting comments on closed pull request
 
            raise HTTPForbidden()
 

	
 
        owner = co.author_id == request.authuser.user_id
 
        repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name)
 
        if h.HasPermissionAny('hg.admin')() or repo_admin or owner:
 
            ChangesetCommentsModel().delete(comment=co)
 
            Session().commit()
 
            return True
 
        else:
 
            raise HTTPForbidden()
kallithea/lib/celerylib/tasks.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.celerylib.tasks
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Kallithea task modules, containing all tasks that are supposed to be run
by the celery daemon
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Oct 6, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import os
 
import traceback
 
import logging
 
import rfc822
 

	
 
from time import mktime
 
from operator import itemgetter
 
from string import lower
 

	
 
from pylons import config
 

	
 
from kallithea import CELERY_ON
 
from kallithea.lib import celerylib
 
from kallithea.lib.helpers import person
 
from kallithea.lib.rcmail.smtp_mailer import SmtpMailer
 
from kallithea.lib.utils import setup_cache_regions, action_logger
 
from kallithea.lib.utils2 import str2bool
 
from kallithea.lib.vcs.utils import author_email
 
from kallithea.lib.compat import json, OrderedDict
 
from kallithea.lib.hooks import log_create_repository
 

	
 
from kallithea.model.db import Statistics, Repository, User
 

	
 

	
 
setup_cache_regions(config)  # pragma: no cover
 

	
 
__all__ = ['whoosh_index', 'get_commits_stats', 'send_email']
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
@celerylib.task
 
@celerylib.locked_task
 
@celerylib.dbsession
 
def whoosh_index(repo_location, full_index):
 
    from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
 
    DBS = celerylib.get_session()
 

	
 
    index_location = config['index_dir']
 
    WhooshIndexingDaemon(index_location=index_location,
 
                         repo_location=repo_location, sa=DBS) \
 
                         .run(full_index=full_index)
 

	
 

	
 
@celerylib.task
 
@celerylib.dbsession
 
def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
 
    DBS = celerylib.get_session()
 
    lockkey = celerylib.__get_lockkey('get_commits_stats', repo_name, ts_min_y,
 
                            ts_max_y)
 
    lockkey_path = config['app_conf']['cache_dir']
 

	
 
    log.info('running task with lockkey %s', lockkey)
 

	
 
    try:
 
        lock = l = celerylib.DaemonLock(file_=os.path.join(lockkey_path, lockkey))
 

	
 
        # for js data compatibility, strip quote characters from the person key
 
        akc = lambda k: person(k).replace('"', "")
 

	
 
        co_day_auth_aggr = {}
 
        commits_by_day_aggregate = {}
 
        repo = Repository.get_by_repo_name(repo_name)
 
        if repo is None:
 
            return True
 

	
 
        repo = repo.scm_instance
 
        repo_size = repo.count()
 
        # return if the repo has no revisions
 
        if repo_size < 1:
 
            lock.release()
 
            return True
 

	
 
        skip_date_limit = True
 
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
 
        last_rev = None
 
        last_cs = None
 
        timegetter = itemgetter('time')
 

	
 
        dbrepo = DBS.query(Repository) \
 
            .filter(Repository.repo_name == repo_name).scalar()
 
        cur_stats = DBS.query(Statistics) \
 
            .filter(Statistics.repository == dbrepo).scalar()
 

	
 
        if cur_stats is not None:
 
            last_rev = cur_stats.stat_on_revision
 

	
 
        if last_rev == repo.get_changeset().revision and repo_size > 1:
 
            # pass silently without any work if we're not on the first revision,
            # or if the current parsing state (from the db marker) is already
            # at the last revision
 
            lock.release()
 
            return True
 

	
 
        if cur_stats:
 
            commits_by_day_aggregate = OrderedDict(json.loads(
 
                                        cur_stats.commit_activity_combined))
 
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)
 

	
 
        log.debug('starting parsing %s', parse_limit)
 
        lmktime = mktime
 

	
 
        last_rev = last_rev + 1 if last_rev >= 0 else 0
 
        log.debug('Getting revisions from %s to %s',
 
             last_rev, last_rev + parse_limit
 
        )
 
        for cs in repo[last_rev:last_rev + parse_limit]:
 
            log.debug('parsing %s', cs)
 
            last_cs = cs  # remember last parsed changeset
 
            k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
 
                          cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])
 

	
 
            if akc(cs.author) in co_day_auth_aggr:
 
                try:
 
                    l = [timegetter(x) for x in
 
                         co_day_auth_aggr[akc(cs.author)]['data']]
 
                    time_pos = l.index(k)
 
                except ValueError:
 
                    time_pos = None
 

	
 
                if time_pos >= 0 and time_pos is not None:
 

	
 
                    datadict = \
 
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]
 

	
 
                    datadict["commits"] += 1
 
                    datadict["added"] += len(cs.added)
 
                    datadict["changed"] += len(cs.changed)
 
                    datadict["removed"] += len(cs.removed)
 

	
 
                else:
 
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
 

	
 
                        datadict = {"time": k,
 
                                    "commits": 1,
 
                                    "added": len(cs.added),
 
                                    "changed": len(cs.changed),
 
                                    "removed": len(cs.removed),
 
                                   }
 
                        co_day_auth_aggr[akc(cs.author)]['data'] \
 
                            .append(datadict)
 

	
 
            else:
 
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
 
                    co_day_auth_aggr[akc(cs.author)] = {
 
                                        "label": akc(cs.author),
 
                                        "data": [{"time":k,
 
                                                 "commits":1,
 
                                                 "added":len(cs.added),
 
                                                 "changed":len(cs.changed),
 
                                                 "removed":len(cs.removed),
 
                                                 }],
 
                                        "schema": ["commits"],
 
                                        }
 

	
 
            #gather all data by day
 
            if k in commits_by_day_aggregate:
 
                commits_by_day_aggregate[k] += 1
 
            else:
 
                commits_by_day_aggregate[k] = 1
 

	
 
        overview_data = sorted(commits_by_day_aggregate.items(),
 
                               key=itemgetter(0))
 

	
 
        if not co_day_auth_aggr:
 
            co_day_auth_aggr[akc(repo.contact)] = {
 
                "label": akc(repo.contact),
 
                "data": [0, 1],
 
                "schema": ["commits"],
 
            }
 

	
 
        stats = cur_stats if cur_stats else Statistics()
 
        stats.commit_activity = json.dumps(co_day_auth_aggr)
 
        stats.commit_activity_combined = json.dumps(overview_data)
 

	
 
        log.debug('last revision %s', last_rev)
 
        leftovers = len(repo.revisions[last_rev:])
 
        log.debug('revisions to parse %s', leftovers)
 

	
 
        if last_rev == 0 or leftovers < parse_limit:
 
            log.debug('getting code trending stats')
 
            stats.languages = json.dumps(__get_codes_stats(repo_name))
 

	
 
        try:
 
            stats.repository = dbrepo
 
            stats.stat_on_revision = last_cs.revision if last_cs else 0
 
            DBS.add(stats)
 
            DBS.commit()
 
        except:
 
            log.error(traceback.format_exc())
 
            DBS.rollback()
 
            lock.release()
 
            return False
 

	
 
        # final release
 
        lock.release()
 

	
 
        # execute another task if celery is enabled
 
        if len(repo.revisions) > 1 and CELERY_ON and recurse_limit > 0:
 
            get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit - 1)
 
        elif recurse_limit <= 0:
 
            log.debug('Not recursing - limit has been reached')
 
        else:
 
            log.debug('Not recursing')
 
    except celerylib.LockHeld:
 
        log.info('Task with key %s already running', lockkey)
 
        return 'Task with key %s already running' % lockkey
 

	
 

	
 
@celerylib.task
 
@celerylib.dbsession
 
def send_email(recipients, subject, body='', html_body='', headers=None, author=None):
 
    """
 
    Sends an email with defined parameters from the .ini files.
 

	
 
    :param recipients: list of recipients; if this is None, the address
        defined in the 'email_to' setting and all admins are used instead
 
    :param subject: subject of the mail
 
    :param body: body of the mail
 
    :param html_body: html version of body
 
    :param headers: dictionary of prepopulated e-mail headers
 
    :param author: User object of the author of this mail, if known and relevant
 
    """
 
    assert isinstance(recipients, list), recipients
 
    if headers is None:
 
        headers = {}
 
    else:
 
        # do not modify the original headers object passed by the caller
 
        headers = headers.copy()
 

	
 
    email_config = config
 
    email_prefix = email_config.get('email_prefix', '')
 
    if email_prefix:
 
        subject = "%s %s" % (email_prefix, subject)
 

	
 
    if not recipients:
 
        # if recipients are not defined we send to email_config + all admins
 
        recipients = [u.email for u in User.query()
 
                      .filter(User.admin == True).all()]
 
        if email_config.get('email_to') is not None:
 
            recipients += [email_config.get('email_to')]
 

	
 
        # If there are still no recipients, there are no admins and no address
 
        # configured in email_to, so return.
 
        if not recipients:
 
            log.error("No recipients specified and no fallback available.")
 
            return False
 

	
 
        log.warning("No recipients specified for '%s' - sending to admins %s", subject, ' '.join(recipients))
 

	
 
    # SMTP sender
 
    envelope_from = email_config.get('app_email_from', 'Kallithea')
 
    # 'From' header
 
    if author is not None:
 
        # set From header based on author but with a generic e-mail address
 
        # In case app_email_from is in "Some Name <e-mail>" format, we first
 
        # extract the e-mail address.
 
        envelope_addr = author_email(envelope_from)
 
        headers['From'] = '"%s" <%s>' % (
 
            rfc822.quote('%s (no-reply)' % author.full_name_or_username),
 
            envelope_addr)
 

	
 
    user = email_config.get('smtp_username')
 
    passwd = email_config.get('smtp_password')
 
    mail_server = email_config.get('smtp_server')
 
    mail_port = email_config.get('smtp_port')
 
    tls = str2bool(email_config.get('smtp_use_tls'))
 
    ssl = str2bool(email_config.get('smtp_use_ssl'))
 
    debug = str2bool(email_config.get('debug'))
 
    smtp_auth = email_config.get('smtp_auth')
 

	
 
    logmsg = ("Mail details:\n"
 
              "recipients: %s\n"
 
              "headers: %s\n"
 
              "subject: %s\n"
 
              "body:\n%s\n"
 
              "html:\n%s\n"
 
              % (' '.join(recipients), headers, subject, body, html_body))
 

	
 
    if mail_server:
 
        log.debug("Sending e-mail. " + logmsg)
 
    else:
 
        log.error("SMTP mail server not configured - cannot send e-mail.")
 
        log.warning(logmsg)
 
        return False
 

	
 
    try:
 
        m = SmtpMailer(envelope_from, user, passwd, mail_server, smtp_auth,
 
                       mail_port, ssl, tls, debug=debug)
 
        m.send(recipients, subject, body, html_body, headers=headers)
 
    except:
 
        log.error('Mail sending failed')
 
        log.error(traceback.format_exc())
 
        return False
 
    return True
 

	
 
@celerylib.task
 
@celerylib.dbsession
 
def create_repo(form_data, cur_user):
 
    from kallithea.model.repo import RepoModel
 
    from kallithea.model.db import Setting
 

	
 
    DBS = celerylib.get_session()
 

	
 
    cur_user = User.guess_instance(cur_user)
 

	
 
    owner = cur_user
 
    repo_name = form_data['repo_name']
 
    repo_name_full = form_data['repo_name_full']
 
    repo_type = form_data['repo_type']
 
    description = form_data['repo_description']
 
    private = form_data['repo_private']
 
    clone_uri = form_data.get('clone_uri')
 
    repo_group = form_data['repo_group']
 
    landing_rev = form_data['repo_landing_rev']
 
    copy_fork_permissions = form_data.get('copy_permissions')
 
    copy_group_permissions = form_data.get('repo_copy_permissions')
 
    fork_of = form_data.get('fork_parent_id')
 
    state = form_data.get('repo_state', Repository.STATE_PENDING)
 

	
 
    # repo creation defaults, private and repo_type are filled in form
 
    defs = Setting.get_default_repo_settings(strip_prefix=True)
 
    enable_statistics = defs.get('repo_enable_statistics')
 
    enable_locking = defs.get('repo_enable_locking')
 
    enable_downloads = defs.get('repo_enable_downloads')
 

	
 
    try:
 
        repo = RepoModel(DBS)._create_repo(
 
            repo_name=repo_name_full,
 
            repo_type=repo_type,
 
            description=description,
 
            owner=owner,
 
            private=private,
 
            clone_uri=clone_uri,
 
            repo_group=repo_group,
 
            landing_rev=landing_rev,
 
            fork_of=fork_of,
 
            copy_fork_permissions=copy_fork_permissions,
 
            copy_group_permissions=copy_group_permissions,
 
            enable_statistics=enable_statistics,
 
            enable_locking=enable_locking,
 
            enable_downloads=enable_downloads,
 
            state=state
 
        )
 

	
 
        action_logger(cur_user, 'user_created_repo',
 
                      form_data['repo_name_full'], '', DBS)
 

	
 
        DBS.commit()
 
        # now create this repo on Filesystem
 
        RepoModel(DBS)._create_filesystem_repo(
 
            repo_name=repo_name,
 
            repo_type=repo_type,
 
            repo_group=RepoModel(DBS)._get_repo_group(repo_group),
 
            clone_uri=clone_uri,
 
        )
 
        repo = Repository.get_by_repo_name(repo_name_full)
 
        log_create_repository(repo.get_dict(), created_by=owner.username)
 

	
 
        # update repo changeset caches initially
 
        repo.update_changeset_cache()
 

	
 
        # set new created state
 
        repo.set_state(Repository.STATE_CREATED)
 
        DBS.commit()
 
    except Exception as e:
 
        log.warning('Exception %s occurred when creating repository, '
 
                    'doing cleanup...' % e)
 
        # rollback things manually !
 
        repo = Repository.get_by_repo_name(repo_name_full)
 
        if repo:
 
            Repository.delete(repo.repo_id)
 
            DBS.commit()
 
            RepoModel(DBS)._delete_filesystem_repo(repo)
 
        raise
 

	
 
    return True
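# A minimal sketch (illustrative only) of the form_data mapping this task
# expects; the keys are the ones read above, the values are placeholders:
#
#     form_data = {
#         'repo_name': 'myrepo',
#         'repo_name_full': 'mygroup/myrepo',
#         'repo_type': 'hg',
#         'repo_description': '',
#         'repo_private': False,
#         'clone_uri': None,
#         'repo_group': None,
#         'repo_landing_rev': 'rev:tip',
#         'fork_parent_id': None,
#     }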
 

	
 

	
 
@celerylib.task
 
@celerylib.dbsession
 
def create_repo_fork(form_data, cur_user):
 
    """
 
    Creates a fork of a repository using internal VCS methods
 

	
 
    :param form_data:
 
    :param cur_user:
 
    """
 
    from kallithea.model.repo import RepoModel
 

	
 
    DBS = celerylib.get_session()
 

	
 
    base_path = Repository.base_path()
 
    cur_user = User.guess_instance(cur_user)
 

	
 
    repo_name = form_data['repo_name']  # fork in this case
 
    repo_name_full = form_data['repo_name_full']
 

	
 
    repo_type = form_data['repo_type']
 
    owner = cur_user
 
    private = form_data['private']
 
    clone_uri = form_data.get('clone_uri')
 
    repo_group = form_data['repo_group']
 
    landing_rev = form_data['landing_rev']
 
    copy_fork_permissions = form_data.get('copy_permissions')
 

	
 
    try:
 
        fork_of = RepoModel(DBS)._get_repo(form_data.get('fork_parent_id'))
 
        fork_of = Repository.guess_instance(form_data.get('fork_parent_id'))
 

	
 
        RepoModel(DBS)._create_repo(
 
            repo_name=repo_name_full,
 
            repo_type=repo_type,
 
            description=form_data['description'],
 
            owner=owner,
 
            private=private,
 
            clone_uri=clone_uri,
 
            repo_group=repo_group,
 
            landing_rev=landing_rev,
 
            fork_of=fork_of,
 
            copy_fork_permissions=copy_fork_permissions
 
        )
 
        action_logger(cur_user, 'user_forked_repo:%s' % repo_name_full,
 
                      fork_of.repo_name, '', DBS)
 
        DBS.commit()
 

	
 
        update_after_clone = form_data['update_after_clone'] # FIXME - unused!
 
        source_repo_path = os.path.join(base_path, fork_of.repo_name)
 

	
 
        # now create this repo on Filesystem
 
        RepoModel(DBS)._create_filesystem_repo(
 
            repo_name=repo_name,
 
            repo_type=repo_type,
 
            repo_group=RepoModel(DBS)._get_repo_group(repo_group),
 
            clone_uri=source_repo_path,
 
        )
 
        repo = Repository.get_by_repo_name(repo_name_full)
 
        log_create_repository(repo.get_dict(), created_by=owner.username)
 

	
 
        # update repo changeset caches initially
 
        repo.update_changeset_cache()
 

	
 
        # set new created state
 
        repo.set_state(Repository.STATE_CREATED)
 
        DBS.commit()
 
    except Exception as e:
 
        log.warning('Exception %s occurred when forking repository, '
 
                    'doing cleanup...' % e)
 
        #rollback things manually !
 
        repo = Repository.get_by_repo_name(repo_name_full)
 
        if repo:
 
            Repository.delete(repo.repo_id)
 
            DBS.commit()
 
            RepoModel(DBS)._delete_filesystem_repo(repo)
 
        raise
 

	
 
    return True
 

	
 

	
 
def __get_codes_stats(repo_name):
 
    from kallithea.config.conf import LANGUAGES_EXTENSIONS_MAP
 
    repo = Repository.get_by_repo_name(repo_name).scm_instance
 

	
 
    tip = repo.get_changeset()
 
    code_stats = {}
 

	
 
    def aggregate(cs):
 
        for f in cs[2]:
 
            ext = lower(f.extension)
 
            if ext in LANGUAGES_EXTENSIONS_MAP.keys() and not f.is_binary:
 
                if ext in code_stats:
 
                    code_stats[ext] += 1
 
                else:
 
                    code_stats[ext] = 1
 

	
 
    map(aggregate, tip.walk('/'))
 

	
 
    return code_stats or {}
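# The result simply counts the known file extensions in the tip changeset,
# e.g. {'py': 120, 'rst': 14}. A rough standard-library equivalent of
# aggregate() (a sketch, assuming the same tip.walk('/') tuple layout used above):
#
#     from collections import Counter
#     code_stats = Counter(
#         f.extension.lower()
#         for _top, _dirs, files in tip.walk('/')
#         for f in files
#         if f.extension.lower() in LANGUAGES_EXTENSIONS_MAP and not f.is_binary)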
kallithea/model/base.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.base
 
~~~~~~~~~~~~~~~~~~~~
 

	
 
The application's model objects
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Nov 25, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 

	
 
import logging
 
from kallithea.model import meta
 
from kallithea.lib.utils2 import obfuscate_url_pw
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def init_model(engine):
 
    """
 
    Initializes the db session and binds the engine to the metadata.
 
    Call this before using any of the tables or classes in the model,
 
    preferably once at application start.
 

	
 
    :param engine: engine to bind to
 
    """
 
    engine_str = obfuscate_url_pw(str(engine.url))
 
    log.info("initializing db for %s", engine_str)
 
    meta.Base.metadata.bind = engine
 

	
 

	
 
class BaseModel(object):
 
    """
 
    Base model for all Kallithea models; it adds the SQLAlchemy session
 
    to the model instance
 

	
 
    :param sa: If passed it reuses this session instead of creating a new one
 
    """
 

	
 
    def __init__(self, sa=None):
 
        if sa is not None:
 
            self.sa = sa
 
        else:
 
            self.sa = meta.Session()
 

	
 
    def _get_repo(self, repository):
 
        """
 
        Helper method to get repository by ID, or repository name
 

	
 
        :param repository: RepoID, repository name or Repository Instance
 
        """
 
        from kallithea.model.db import Repository
 
        return Repository.guess_instance(repository)
 

	
 
    def _get_perm(self, permission):
 
        """
 
        Helper method to get permission by ID, or permission name
 

	
 
        :param permission: PermissionID, permission_name or Permission instance
 
        """
 
        from kallithea.model.db import Permission
 
        return Permission.guess_instance(permission)
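# With _get_repo inlined, callers resolve repositories directly through
# Repository.guess_instance; a sketch of the accepted argument forms
# (assuming a repository named 'myrepo' exists with id 1):
#
#     from kallithea.model.db import Repository
#     Repository.guess_instance(1)          # by primary key
#     Repository.guess_instance('myrepo')   # by repository name
#     Repository.guess_instance(repo_obj)   # an existing instance passes through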
kallithea/model/changeset_status.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.changeset_status
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Changeset status controller
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 30, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 
from sqlalchemy.orm import joinedload
 

	
 
from kallithea.model.base import BaseModel
 
from kallithea.model.db import ChangesetStatus, PullRequest, User
 
from kallithea.model.db import ChangesetStatus, PullRequest, Repository, User
 
from kallithea.lib.exceptions import StatusChangeOnClosedPullRequestError
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class ChangesetStatusModel(BaseModel):
 

	
 
    def _get_status_query(self, repo, revision, pull_request,
 
                          with_revisions=False):
 
        repo = self._get_repo(repo)
 
        repo = Repository.guess_instance(repo)
 

	
 
        q = ChangesetStatus.query() \
 
            .filter(ChangesetStatus.repo == repo)
 
        if not with_revisions:
 
            # only report the latest vote across all users! TODO: be smarter!
 
            q = q.filter(ChangesetStatus.version == 0)
 

	
 
        if revision:
 
            q = q.filter(ChangesetStatus.revision == revision)
 
        elif pull_request:
 
            pull_request = PullRequest.guess_instance(pull_request)
 
            q = q.filter(ChangesetStatus.pull_request == pull_request)
 
        else:
 
            raise Exception('Please specify revision or pull_request')
 
        q = q.order_by(ChangesetStatus.version.asc())
 
        return q
 

	
 
    def _calculate_status(self, statuses):
 
        """
 
        Given a list of statuses, calculate the resulting status, according to
 
        the policy: approve on consensus, reject when there is at least one reject.
 
        """
 

	
 
        if not statuses:
 
            return ChangesetStatus.STATUS_UNDER_REVIEW
 

	
 
        if all(st and st.status == ChangesetStatus.STATUS_APPROVED for st in statuses):
 
            return ChangesetStatus.STATUS_APPROVED
 

	
 
        if any(st and st.status == ChangesetStatus.STATUS_REJECTED for st in statuses):
 
            return ChangesetStatus.STATUS_REJECTED
 

	
 
        return ChangesetStatus.STATUS_UNDER_REVIEW
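    # The policy spelled out as examples (a sketch; 'approved' and 'rejected'
    # stand for status objects carrying the corresponding
    # ChangesetStatus.STATUS_* values, None for a reviewer who has not voted):
    #
    #     _calculate_status([])                     # -> STATUS_UNDER_REVIEW
    #     _calculate_status([approved, approved])   # -> STATUS_APPROVED (consensus)
    #     _calculate_status([approved, rejected])   # -> STATUS_REJECTED (one veto)
    #     _calculate_status([approved, None])       # -> STATUS_UNDER_REVIEW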
 

	
 
    def calculate_pull_request_result(self, pull_request):
 
        """
 
        Return a tuple (reviewers, pending reviewers, pull request status)
 
        Only approve and reject count as valid votes.
 
        """
 

	
 
        # collect latest votes from all voters
 
        cs_statuses = dict()
 
        for st in reversed(self.get_statuses(pull_request.org_repo,
 
                                             pull_request=pull_request,
 
                                             with_revisions=True)):
 
            cs_statuses[st.author.username] = st
 

	
 
        # collect votes from official reviewers
 
        pull_request_reviewers = []
 
        pull_request_pending_reviewers = []
 
        relevant_statuses = []
 
        for user in pull_request.get_reviewer_users():
 
            st = cs_statuses.get(user.username)
 
            relevant_statuses.append(st)
 
            status = ChangesetStatus.STATUS_NOT_REVIEWED if st is None else st.status
 
            if status in (ChangesetStatus.STATUS_NOT_REVIEWED,
 
                          ChangesetStatus.STATUS_UNDER_REVIEW):
 
                pull_request_pending_reviewers.append(user)
 
            pull_request_reviewers.append((user, status))
 

	
 
        result = self._calculate_status(relevant_statuses)
 

	
 
        return (pull_request_reviewers,
 
                pull_request_pending_reviewers,
 
                result)
 

	
 
    def get_statuses(self, repo, revision=None, pull_request=None,
 
                     with_revisions=False):
 
        q = self._get_status_query(repo, revision, pull_request,
 
                                   with_revisions)
 
        q = q.options(joinedload('author'))
 
        return q.all()
 

	
 
    def get_status(self, repo, revision=None, pull_request=None, as_str=True):
 
        """
 
        Returns latest status of changeset for given revision or for given
 
        pull request. Statuses are versioned inside the table itself, and
 
        version == 0 is always the current one
 

	
 
        :param repo:
 
        :param revision: 40char hash or None
 
        :param pull_request: pull_request reference
 
        :param as_str: return status as string not object
 
        """
 
        q = self._get_status_query(repo, revision, pull_request)
 

	
 
        # need to use first here since there can be multiple statuses
 
        # returned from pull_request
 
        status = q.first()
 
        if as_str:
 
            return str(status.status) if status else ChangesetStatus.DEFAULT
 
        return status
 

	
 
    def set_status(self, repo, status, user, comment, revision=None,
 
                   pull_request=None, dont_allow_on_closed_pull_request=False):
 
        """
 
        Creates a new status for a changeset or updates the old ones, bumping their
 
        version and leaving the current status at the value of 'status'.
 

	
 
        :param repo:
 
        :param status:
 
        :param user:
 
        :param comment:
 
        :param revision:
 
        :param pull_request:
 
        :param dont_allow_on_closed_pull_request: don't allow a status change
 
            if the last status was for a pull request and it is closed. We shouldn't
 
            mess with this manually
 
        """
 
        repo = self._get_repo(repo)
 
        repo = Repository.guess_instance(repo)
 

	
 
        q = ChangesetStatus.query()
 
        if revision is not None:
 
            assert pull_request is None
 
            q = q.filter(ChangesetStatus.repo == repo)
 
            q = q.filter(ChangesetStatus.revision == revision)
 
            revisions = [revision]
 
        else:
 
            assert pull_request is not None
 
            pull_request = PullRequest.guess_instance(pull_request)
 
            repo = pull_request.org_repo
 
            q = q.filter(ChangesetStatus.repo == repo)
 
            q = q.filter(ChangesetStatus.revision.in_(pull_request.revisions))
 
            revisions = pull_request.revisions
 
        cur_statuses = q.all()
 

	
 
        # if statuses exist and the last one is associated with a closed pull request
 
        # we need to check if we can allow this status change
 
        if (dont_allow_on_closed_pull_request and cur_statuses
 
            and getattr(cur_statuses[0].pull_request, 'status', '')
 
                == PullRequest.STATUS_CLOSED):
 
            raise StatusChangeOnClosedPullRequestError(
 
                'Changing status on closed pull request is not allowed'
 
            )
 

	
 
        #update all current statuses with older version
 
        for st in cur_statuses:
 
            st.version += 1
 
            self.sa.add(st)
 

	
 
        new_statuses = []
 
        for rev in revisions:
 
            new_status = ChangesetStatus()
 
            new_status.version = 0 # default
 
            new_status.author = User.guess_instance(user)
 
            new_status.repo = self._get_repo(repo)
 
            new_status.repo = Repository.guess_instance(repo)
 
            new_status.status = status
 
            new_status.comment = comment
 
            new_status.revision = rev
 
            new_status.pull_request = pull_request
 
            new_statuses.append(new_status)
 
            self.sa.add(new_status)
 
        return new_statuses
kallithea/model/comment.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.comment
 
~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
comments model for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Nov 11, 2011
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 

	
 
from pylons.i18n.translation import _
 
from collections import defaultdict
 

	
 
from kallithea.lib.utils2 import extract_mentioned_users, safe_unicode
 
from kallithea.lib import helpers as h
 
from kallithea.model.base import BaseModel
 
from kallithea.model.db import ChangesetComment, User, \
 
    Notification, PullRequest
 
    Notification, PullRequest, Repository
 
from kallithea.model.notification import NotificationModel
 
from kallithea.model.meta import Session
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class ChangesetCommentsModel(BaseModel):
 

	
 
    def _get_notification_data(self, repo, comment, author, comment_text,
 
                               line_no=None, revision=None, pull_request=None,
 
                               status_change=None, closing_pr=False):
 
        """
 
        :returns: tuple (subj,body,recipients,notification_type,email_kwargs)
 
        """
 
        # make notification
 
        body = comment_text  # text of the comment
 
        line = ''
 
        if line_no:
 
            line = _('on line %s') % line_no
 

	
 
        #changeset
 
        if revision:
 
            notification_type = Notification.TYPE_CHANGESET_COMMENT
 
            cs = repo.scm_instance.get_changeset(revision)
 
            desc = cs.short_id
 

	
 
            threading = ['%s-rev-%s@%s' % (repo.repo_name, revision, h.canonical_hostname())]
 
            if line_no: # TODO: url to file _and_ line number
 
                threading.append('%s-rev-%s-line-%s@%s' % (repo.repo_name, revision, line_no,
 
                                                           h.canonical_hostname()))
 
            comment_url = h.canonical_url('changeset_home',
 
                repo_name=repo.repo_name,
 
                revision=revision,
 
                anchor='comment-%s' % comment.comment_id)
 
            subj = safe_unicode(
 
                h.link_to('Re changeset: %(desc)s %(line)s' % \
 
                          {'desc': desc, 'line': line},
 
                          comment_url)
 
            )
 
            # get the current participants of this changeset
 
            recipients = ChangesetComment.get_users(revision=revision)
 
            # add changeset author if it's known locally
 
            cs_author = User.get_from_cs_author(cs.author)
 
            if not cs_author:
 
                #use repo owner if we cannot extract the author correctly
 
                # FIXME: just use committer name even if not a user
 
                cs_author = repo.owner
 
            recipients += [cs_author]
 
            email_kwargs = {
 
                'status_change': status_change,
 
                'cs_comment_user': author.full_name_and_username,
 
                'cs_target_repo': h.canonical_url('summary_home', repo_name=repo.repo_name),
 
                'cs_comment_url': comment_url,
 
                'cs_url': h.canonical_url('changeset_home', repo_name=repo.repo_name, revision=revision),
 
                'raw_id': revision,
 
                'message': cs.message,
 
                'message_short': h.shorter(cs.message, 50, firstline=True),
 
                'cs_author': cs_author,
 
                'repo_name': repo.repo_name,
 
                'short_id': h.short_id(revision),
 
                'branch': cs.branch,
 
                'comment_username': author.username,
 
                'threading': threading,
 
            }
 
        #pull request
 
        elif pull_request:
 
            notification_type = Notification.TYPE_PULL_REQUEST_COMMENT
 
            desc = comment.pull_request.title
 
            _org_ref_type, org_ref_name, _org_rev = comment.pull_request.org_ref.split(':')
 
            _other_ref_type, other_ref_name, _other_rev = comment.pull_request.other_ref.split(':')
 
            threading = ['%s-pr-%s@%s' % (pull_request.other_repo.repo_name,
 
                                          pull_request.pull_request_id,
 
                                          h.canonical_hostname())]
 
            if line_no: # TODO: url to file _and_ line number
 
                threading.append('%s-pr-%s-line-%s@%s' % (pull_request.other_repo.repo_name,
 
                                                          pull_request.pull_request_id, line_no,
 
                                                          h.canonical_hostname()))
 
            comment_url = pull_request.url(canonical=True,
 
                anchor='comment-%s' % comment.comment_id)
 
            subj = safe_unicode(
 
                h.link_to('Re pull request %(pr_nice_id)s: %(desc)s %(line)s' % \
 
                          {'desc': desc,
 
                           'pr_nice_id': comment.pull_request.nice_id(),
 
                           'line': line},
 
                          comment_url)
 
            )
 
            # get the current participants of this pull request
 
            recipients = ChangesetComment.get_users(pull_request_id=
 
                                                pull_request.pull_request_id)
 
            # add pull request author
 
            recipients += [pull_request.owner]
 

	
 
            # add the reviewers to notification
 
            recipients += pull_request.get_reviewer_users()
 

	
 
            #set some variables for email notification
 
            email_kwargs = {
 
                'pr_title': pull_request.title,
 
                'pr_title_short': h.shorter(pull_request.title, 50),
 
                'pr_nice_id': pull_request.nice_id(),
 
                'status_change': status_change,
 
                'closing_pr': closing_pr,
 
                'pr_comment_url': comment_url,
 
                'pr_url': pull_request.url(canonical=True),
 
                'pr_comment_user': author.full_name_and_username,
 
                'pr_target_repo': h.canonical_url('summary_home',
 
                                   repo_name=pull_request.other_repo.repo_name),
 
                'pr_target_branch': other_ref_name,
 
                'pr_source_repo': h.canonical_url('summary_home',
 
                                   repo_name=pull_request.org_repo.repo_name),
 
                'pr_source_branch': org_ref_name,
 
                'pr_owner': pull_request.owner,
 
                'pr_owner_username': pull_request.owner.username,
 
                'repo_name': pull_request.other_repo.repo_name,
 
                'comment_username': author.username,
 
                'threading': threading,
 
            }
 

	
 
        return subj, body, recipients, notification_type, email_kwargs
 

	
 
    def create(self, text, repo, author, revision=None, pull_request=None,
 
               f_path=None, line_no=None, status_change=None, closing_pr=False,
 
               send_email=True):
 
        """
 
        Creates a new comment for either a changeset or a pull request.
 
        status_change and closing_pr are only used for the optional email.
 

	
 
        Returns the created comment.
 
        """
 
        if not status_change and not text:
 
            log.warning('Missing text for comment, skipping...')
 
            return None
 

	
 
        repo = self._get_repo(repo)
 
        repo = Repository.guess_instance(repo)
 
        author = User.guess_instance(author)
 
        comment = ChangesetComment()
 
        comment.repo = repo
 
        comment.author = author
 
        comment.text = text
 
        comment.f_path = f_path
 
        comment.line_no = line_no
 

	
 
        if revision is not None:
 
            comment.revision = revision
 
        elif pull_request is not None:
 
            pull_request = PullRequest.guess_instance(pull_request)
 
            comment.pull_request = pull_request
 
        else:
 
            raise Exception('Please specify revision or pull_request_id')
 

	
 
        Session().add(comment)
 
        Session().flush()
 

	
 
        if send_email:
 
            (subj, body, recipients, notification_type,
 
             email_kwargs) = self._get_notification_data(
 
                                repo, comment, author,
 
                                comment_text=text,
 
                                line_no=line_no,
 
                                revision=revision,
 
                                pull_request=pull_request,
 
                                status_change=status_change,
 
                                closing_pr=closing_pr)
 
            email_kwargs['is_mention'] = False
 
            # create notification objects, and emails
 
            NotificationModel().create(
 
                created_by=author, subject=subj, body=body,
 
                recipients=recipients, type_=notification_type,
 
                email_kwargs=email_kwargs,
 
            )
 

	
 
            mention_recipients = extract_mentioned_users(body).difference(recipients)
 
            if mention_recipients:
 
                email_kwargs['is_mention'] = True
 
                subj = _('[Mention]') + ' ' + subj
 
                # FIXME: this subject is wrong and unused!
 
                NotificationModel().create(
 
                    created_by=author, subject=subj, body=body,
 
                    recipients=mention_recipients,
 
                    type_=notification_type,
 
                    email_kwargs=email_kwargs
 
                )
 

	
 
        return comment
 

	
 
    def delete(self, comment):
 
        comment = ChangesetComment.guess_instance(comment)
 
        Session().delete(comment)
 

	
 
        return comment
 

	
 
    def get_comments(self, repo_id, revision=None, pull_request=None):
 
        """
 
        Gets general comments for either revision or pull_request.
 

	
 
        Returns a list, ordered by creation date.
 
        """
 
        return self._get_comments(repo_id, revision=revision, pull_request=pull_request,
 
                                  inline=False)
 

	
 
    def get_inline_comments(self, repo_id, revision=None, pull_request=None):
 
        """
 
        Gets inline comments for either revision or pull_request.
 

	
 
        Returns a list of tuples with file path and list of comments per line number.
 
        """
 
        comments = self._get_comments(repo_id, revision=revision, pull_request=pull_request,
 
                                      inline=True)
 

	
 
        paths = defaultdict(lambda: defaultdict(list))
 
        for co in comments:
 
            paths[co.f_path][co.line_no].append(co)
 
        return paths.items()
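    # The returned structure groups inline comments per file path and then per
    # line number, roughly (a sketch with placeholder values):
    #
    #     [(f_path, {line_no: [comment, ...], ...}),
    #      ...]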
 

	
 
    def _get_comments(self, repo_id, revision=None, pull_request=None, inline=False):
 
        """
 
        Gets comments for either revision or pull_request_id, either inline or general.
 
        """
 
        q = Session().query(ChangesetComment)
 

	
 
        if inline:
 
            q = q.filter(ChangesetComment.line_no != None) \
 
                .filter(ChangesetComment.f_path != None)
 
        else:
 
            q = q.filter(ChangesetComment.line_no == None) \
 
                .filter(ChangesetComment.f_path == None)
 

	
 
        if revision is not None:
 
            q = q.filter(ChangesetComment.revision == revision) \
 
                .filter(ChangesetComment.repo_id == repo_id)
 
        elif pull_request is not None:
 
            pull_request = PullRequest.guess_instance(pull_request)
 
            q = q.filter(ChangesetComment.pull_request == pull_request)
 
        else:
 
            raise Exception('Please specify either revision or pull_request')
 

	
 
        return q.order_by(ChangesetComment.created_on).all()
kallithea/model/repo.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.repo
 
~~~~~~~~~~~~~~~~~~~~
 

	
 
Repository model for kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jun 5, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 

	
 
"""
 

	
 
import os
 
import shutil
 
import logging
 
import traceback
 
from datetime import datetime
 
from sqlalchemy.orm import subqueryload
 

	
 
from kallithea.lib.utils import make_ui
 
from kallithea.lib.vcs.backends import get_backend
 
from kallithea.lib.compat import json
 
from kallithea.lib.utils2 import LazyProperty, safe_str, safe_unicode, \
 
    remove_prefix, obfuscate_url_pw, get_current_authuser
 
from kallithea.lib.caching_query import FromCache
 
from kallithea.lib.hooks import log_delete_repository
 

	
 
from kallithea.model.base import BaseModel
 
from kallithea.model.db import Repository, UserRepoToPerm, UserGroupRepoToPerm, \
 
    UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission, \
 
    Statistics, UserGroup, Ui, RepoGroup, RepositoryField
 

	
 
from kallithea.lib import helpers as h
 
from kallithea.lib.auth import HasRepoPermissionAny, HasUserGroupPermissionAny
 
from kallithea.lib.exceptions import AttachedForksError
 
from kallithea.model.scm import UserGroupList
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class RepoModel(BaseModel):
 

	
 
    URL_SEPARATOR = Repository.url_sep()
 

	
 
    def _get_user_group(self, users_group):
 
        return UserGroup.guess_instance(users_group)
 

	
 
    def _get_repo_group(self, repo_group):
 
        return RepoGroup.guess_instance(repo_group)
 

	
 
    def _create_default_perms(self, repository, private):
 
        # create default permission
 
        default = 'repository.read'
 
        def_user = User.get_default_user()
 
        for p in def_user.user_perms:
 
            if p.permission.permission_name.startswith('repository.'):
 
                default = p.permission.permission_name
 
                break
 

	
 
        default_perm = 'repository.none' if private else default
 

	
 
        repo_to_perm = UserRepoToPerm()
 
        repo_to_perm.permission = Permission.get_by_key(default_perm)
 

	
 
        repo_to_perm.repository = repository
 
        repo_to_perm.user_id = def_user.user_id
 

	
 
        return repo_to_perm
 

	
 
    @LazyProperty
 
    def repos_path(self):
 
        """
 
        Gets the repositories root path from database
 
        """
 

	
 
        q = self.sa.query(Ui).filter(Ui.ui_key == '/').one()
 
        return q.ui_value
 

	
 
    def get(self, repo_id, cache=False):
 
        repo = self.sa.query(Repository) \
 
            .filter(Repository.repo_id == repo_id)
 

	
 
        if cache:
 
            repo = repo.options(FromCache("sql_cache_short",
 
                                          "get_repo_%s" % repo_id))
 
        return repo.scalar()
 

	
 
    def get_repo(self, repository):
 
        return self._get_repo(repository)
 
        return Repository.guess_instance(repository)
 

	
 
    def get_by_repo_name(self, repo_name, cache=False):
 
        repo = self.sa.query(Repository) \
 
            .filter(Repository.repo_name == repo_name)
 

	
 
        if cache:
 
            repo = repo.options(FromCache("sql_cache_short",
 
                                          "get_repo_%s" % repo_name))
 
        return repo.scalar()
 

	
 
    def get_all_user_repos(self, user):
 
        """
 
        Gets all repositories to which the given user has at least read access
 

	
 
        :param user:
 
        """
 
        from kallithea.lib.auth import AuthUser
 
        user = User.guess_instance(user)
 
        repos = AuthUser(dbuser=user).permissions['repositories']
 
        access_check = lambda r: r[1] in ['repository.read',
 
                                          'repository.write',
 
                                          'repository.admin']
 
        repos = [x[0] for x in filter(access_check, repos.items())]
 
        return Repository.query().filter(Repository.repo_name.in_(repos))
 

	
 
    def get_users_js(self):
 
        users = self.sa.query(User) \
 
            .filter(User.active == True) \
 
            .order_by(User.name, User.lastname) \
 
            .all()
 
        return json.dumps([
 
            {
 
                'id': u.user_id,
 
                'fname': h.escape(u.name),
 
                'lname': h.escape(u.lastname),
 
                'nname': u.username,
 
                'gravatar_lnk': h.gravatar_url(u.email, size=28, default='default'),
 
                'gravatar_size': 14,
 
            } for u in users]
 
        )
 

	
 
    def get_user_groups_js(self):
 
        user_groups = self.sa.query(UserGroup) \
 
            .filter(UserGroup.users_group_active == True) \
 
            .order_by(UserGroup.users_group_name) \
 
            .options(subqueryload(UserGroup.members)) \
 
            .all()
 
        user_groups = UserGroupList(user_groups, perm_set=['usergroup.read',
 
                                                           'usergroup.write',
 
                                                           'usergroup.admin'])
 
        return json.dumps([
 
            {
 
                'id': gr.users_group_id,
 
                'grname': gr.users_group_name,
 
                'grmembers': len(gr.members),
 
            } for gr in user_groups]
 
        )
 

	
 
    @classmethod
 
    def _render_datatable(cls, tmpl, *args, **kwargs):
 
        import kallithea
 
        from pylons import tmpl_context as c, request
 
        from pylons.i18n.translation import _
 

	
 
        _tmpl_lookup = kallithea.CONFIG['pylons.app_globals'].mako_lookup
 
        template = _tmpl_lookup.get_template('data_table/_dt_elements.html')
 

	
 
        tmpl = template.get_def(tmpl)
 
        kwargs.update(dict(_=_, h=h, c=c, request=request))
 
        return tmpl.render(*args, **kwargs)
 

	
 
    def get_repos_as_dict(self, repos_list=None, admin=False, perm_check=True,
 
                          super_user_actions=False, short_name=False):
 
        _render = self._render_datatable
 
        from pylons import tmpl_context as c
 

	
 
        def repo_lnk(name, rtype, rstate, private, fork_of):
 
            return _render('repo_name', name, rtype, rstate, private, fork_of,
 
                           short_name=short_name, admin=False)
 

	
 
        def last_change(last_change):
 
            return _render("last_change", last_change)
 

	
 
        def rss_lnk(repo_name):
 
            return _render("rss", repo_name)
 

	
 
        def atom_lnk(repo_name):
 
            return _render("atom", repo_name)
 

	
 
        def last_rev(repo_name, cs_cache):
 
            return _render('revision', repo_name, cs_cache.get('revision'),
 
                           cs_cache.get('raw_id'), cs_cache.get('author'),
 
                           cs_cache.get('message'))
 

	
 
        def desc(desc):
 
            return h.urlify_text(desc, truncate=80, stylize=c.visual.stylify_metatags)
 

	
 
        def state(repo_state):
 
            return _render("repo_state", repo_state)
 

	
 
        def repo_actions(repo_name):
 
            return _render('repo_actions', repo_name, super_user_actions)
 

	
 
        def owner_actions(owner_id, username):
 
            return _render('user_name', owner_id, username)
 

	
 
        repos_data = []
 
        for repo in repos_list:
 
            if perm_check:
 
                # check permission at this level
 
                if not HasRepoPermissionAny(
 
                        'repository.read', 'repository.write',
 
                        'repository.admin'
 
                )(repo.repo_name, 'get_repos_as_dict check'):
 
                    continue
 
            cs_cache = repo.changeset_cache
 
            row = {
 
                "raw_name": repo.repo_name,
 
                "just_name": repo.just_name,
 
                "name": repo_lnk(repo.repo_name, repo.repo_type,
 
                                 repo.repo_state, repo.private, repo.fork),
 
                "last_change_iso": repo.last_db_change.isoformat(),
 
                "last_change": last_change(repo.last_db_change),
 
                "last_changeset": last_rev(repo.repo_name, cs_cache),
 
                "last_rev_raw": cs_cache.get('revision'),
 
                "desc": desc(repo.description),
 
                "owner": h.person(repo.owner),
 
                "state": state(repo.repo_state),
 
                "rss": rss_lnk(repo.repo_name),
 
                "atom": atom_lnk(repo.repo_name),
 

	
 
            }
 
            if admin:
 
                row.update({
 
                    "action": repo_actions(repo.repo_name),
 
                    "owner": owner_actions(repo.owner_id,
 
                                           h.person(repo.owner))
 
                })
 
            repos_data.append(row)
 

	
 
        return {
 
            "totalRecords": len(repos_list),
 
            "startIndex": 0,
 
            "sort": "name",
 
            "dir": "asc",
 
            "records": repos_data
 
        }
 

	
 
    def _get_defaults(self, repo_name):
 
        """
 
        Gets information about the repository and returns a dict for
 
        use in forms
 

	
 
        :param repo_name:
 
        """
 

	
 
        repo_info = Repository.get_by_repo_name(repo_name)
 

	
 
        if repo_info is None:
 
            return None
 

	
 
        defaults = repo_info.get_dict()
 
        defaults['repo_name'] = repo_info.just_name
 
        defaults['repo_group'] = repo_info.group_id
 

	
 
        for strip, k in [(0, 'repo_type'), (1, 'repo_enable_downloads'),
 
                         (1, 'repo_description'), (1, 'repo_enable_locking'),
 
                         (1, 'repo_landing_rev'), (0, 'clone_uri'),
 
                         (1, 'repo_private'), (1, 'repo_enable_statistics')]:
 
            attr = k
 
            if strip:
 
                attr = remove_prefix(k, 'repo_')
 

	
 
            val = defaults[attr]
 
            if k == 'repo_landing_rev':
 
                val = ':'.join(defaults[attr])
 
            defaults[k] = val
 
            if k == 'clone_uri':
 
                defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
 

	
 
        # fill owner
 
        if repo_info.owner:
 
            defaults.update({'owner': repo_info.owner.username})
 
        else:
 
            replacement_user = User.query().filter(User.admin ==
 
                                                   True).first().username
 
            defaults.update({'owner': replacement_user})
 

	
 
        # fill repository users
 
        for p in repo_info.repo_to_perm:
 
            defaults.update({'u_perm_%s' % p.user.username:
 
                                 p.permission.permission_name})
 

	
 
        # fill repository groups
 
        for p in repo_info.users_group_to_perm:
 
            defaults.update({'g_perm_%s' % p.users_group.users_group_name:
 
                                 p.permission.permission_name})
 

	
 
        return defaults
 

	
 
    def update(self, repo, **kwargs):
 
        try:
 
            cur_repo = self._get_repo(repo)
 
            cur_repo = Repository.guess_instance(repo)
 
            org_repo_name = cur_repo.repo_name
 
            if 'owner' in kwargs:
 
                cur_repo.owner = User.get_by_username(kwargs['owner'])
 

	
 
            if 'repo_group' in kwargs:
 
                cur_repo.group = RepoGroup.get(kwargs['repo_group'])
 
                cur_repo.repo_name = cur_repo.get_new_name(cur_repo.just_name)
 
            log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
 
            for k in ['repo_enable_downloads',
 
                      'repo_description',
 
                      'repo_enable_locking',
 
                      'repo_landing_rev',
 
                      'repo_private',
 
                      'repo_enable_statistics',
 
                      ]:
 
                if k in kwargs:
 
                    setattr(cur_repo, remove_prefix(k, 'repo_'), kwargs[k])
 
            clone_uri = kwargs.get('clone_uri')
 
            if clone_uri is not None and clone_uri != cur_repo.clone_uri_hidden:
 
                cur_repo.clone_uri = clone_uri
 

	
 
            if 'repo_name' in kwargs:
 
                cur_repo.repo_name = cur_repo.get_new_name(kwargs['repo_name'])
 

	
 
            #if private flag is set, reset default permission to NONE
 
            if kwargs.get('repo_private'):
 
                EMPTY_PERM = 'repository.none'
 
                RepoModel().grant_user_permission(
 
                    repo=cur_repo, user='default', perm=EMPTY_PERM
 
                )
 
                #handle extra fields
 
            for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
 
                                kwargs):
 
                k = RepositoryField.un_prefix_key(field)
 
                ex_field = RepositoryField.get_by_key_name(key=k, repo=cur_repo)
 
                if ex_field:
 
                    ex_field.field_value = kwargs[field]
 
                    self.sa.add(ex_field)
 
            self.sa.add(cur_repo)
 

	
 
            if org_repo_name != cur_repo.repo_name:
 
                # rename repository
 
                self._rename_filesystem_repo(old=org_repo_name, new=cur_repo.repo_name)
 

	
 
            return cur_repo
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def _create_repo(self, repo_name, repo_type, description, owner,
 
                     private=False, clone_uri=None, repo_group=None,
 
                     landing_rev='rev:tip', fork_of=None,
 
                     copy_fork_permissions=False, enable_statistics=False,
 
                     enable_locking=False, enable_downloads=False,
 
                     copy_group_permissions=False, state=Repository.STATE_PENDING):
 
        """
 
        Create the repository in the database with PENDING state. This should only be
 
        executed by create(), with the exception of importing existing repositories.
 

	
 
        """
 
        from kallithea.model.scm import ScmModel
 

	
 
        owner = User.guess_instance(owner)
 
        fork_of = self._get_repo(fork_of)
 
        fork_of = Repository.guess_instance(fork_of)
 
        repo_group = self._get_repo_group(repo_group)
 
        try:
 
            repo_name = safe_unicode(repo_name)
 
            description = safe_unicode(description)
 
            # repo name is just a name of repository
 
            # while repo_name_full is a full qualified name that is combined
 
            # with name and path of group
 
            repo_name_full = repo_name
 
            repo_name = repo_name.split(self.URL_SEPARATOR)[-1]
 

	
 
            new_repo = Repository()
 
            new_repo.repo_state = state
 
            new_repo.enable_statistics = False
 
            new_repo.repo_name = repo_name_full
 
            new_repo.repo_type = repo_type
 
            new_repo.owner = owner
 
            new_repo.group = repo_group
 
            new_repo.description = description or repo_name
 
            new_repo.private = private
 
            new_repo.clone_uri = clone_uri
 
            new_repo.landing_rev = landing_rev
 

	
 
            new_repo.enable_statistics = enable_statistics
 
            new_repo.enable_locking = enable_locking
 
            new_repo.enable_downloads = enable_downloads
 

	
 
            if repo_group:
 
                new_repo.enable_locking = repo_group.enable_locking
 

	
 
            if fork_of:
 
                parent_repo = fork_of
 
                new_repo.fork = parent_repo
 

	
 
            self.sa.add(new_repo)
 

	
 
            if fork_of and copy_fork_permissions:
 
                repo = fork_of
 
                user_perms = UserRepoToPerm.query() \
 
                    .filter(UserRepoToPerm.repository == repo).all()
 
                group_perms = UserGroupRepoToPerm.query() \
 
                    .filter(UserGroupRepoToPerm.repository == repo).all()
 

	
 
                for perm in user_perms:
 
                    UserRepoToPerm.create(perm.user, new_repo, perm.permission)
 

	
 
                for perm in group_perms:
 
                    UserGroupRepoToPerm.create(perm.users_group, new_repo,
 
                                               perm.permission)
 

	
 
            elif repo_group and copy_group_permissions:
 

	
 
                user_perms = UserRepoGroupToPerm.query() \
 
                    .filter(UserRepoGroupToPerm.group == repo_group).all()
 

	
 
                group_perms = UserGroupRepoGroupToPerm.query() \
 
                    .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
 

	
 
                for perm in user_perms:
 
                    perm_name = perm.permission.permission_name.replace('group.', 'repository.')
 
                    perm_obj = Permission.get_by_key(perm_name)
 
                    UserRepoToPerm.create(perm.user, new_repo, perm_obj)
 

	
 
                for perm in group_perms:
 
                    perm_name = perm.permission.permission_name.replace('group.', 'repository.')
 
                    perm_obj = Permission.get_by_key(perm_name)
 
                    UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
 

	
 
            else:
 
                perm_obj = self._create_default_perms(new_repo, private)
 
                self.sa.add(perm_obj)
 

	
 
            # now automatically start following this repository as owner
 
            ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
 
                                                    owner.user_id)
 
            # we need to flush here, in order to check if database won't
 
            # throw any exceptions, create filesystem dirs at the very end
 
            self.sa.flush()
 
            return new_repo
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def create(self, form_data, cur_user):
 
        """
 
        Create repository using celery tasks
 

	
 
        :param form_data:
 
        :param cur_user:
 
        """
 
        from kallithea.lib.celerylib import tasks
 
        return tasks.create_repo(form_data, cur_user)
 

	
 
    def _update_permissions(self, repo, perms_new=None, perms_updates=None,
 
                            check_perms=True):
 
        if not perms_new:
 
            perms_new = []
 
        if not perms_updates:
 
            perms_updates = []
 

	
 
        # update permissions
 
        for member, perm, member_type in perms_updates:
 
            if member_type == 'user':
 
                # this updates existing one
 
                self.grant_user_permission(
 
                    repo=repo, user=member, perm=perm
 
                )
 
            else:
 
                #check if we have permissions to alter this usergroup
 
                req_perms = (
 
                    'usergroup.read', 'usergroup.write', 'usergroup.admin')
 
                if not check_perms or HasUserGroupPermissionAny(*req_perms)(
 
                        member):
 
                    self.grant_user_group_permission(
 
                        repo=repo, group_name=member, perm=perm
 
                    )
 
            # set new permissions
 
        for member, perm, member_type in perms_new:
 
            if member_type == 'user':
 
                self.grant_user_permission(
 
                    repo=repo, user=member, perm=perm
 
                )
 
            else:
 
                #check if we have permissions to alter this usergroup
 
                req_perms = (
 
                    'usergroup.read', 'usergroup.write', 'usergroup.admin')
 
                if not check_perms or HasUserGroupPermissionAny(*req_perms)(
 
                        member):
 
                    self.grant_user_group_permission(
 
                        repo=repo, group_name=member, perm=perm
 
                    )
 

	
 
    def create_fork(self, form_data, cur_user):
 
        """
 
        Simple wrapper into executing celery task for fork creation
 

	
 
        :param form_data:
 
        :param cur_user:
 
        """
 
        from kallithea.lib.celerylib import tasks
 
        return tasks.create_repo_fork(form_data, cur_user)
 

	
 
    def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
 
        """
 
        Delete the given repository; the forks parameter defines what to do with
 
        attached forks. Throws AttachedForksError if deleted repo has attached
 
        forks
 

	
 
        :param repo:
 
        :param forks: str 'delete' or 'detach'
 
        :param fs_remove: remove(archive) repo from filesystem
 
        """
 
        if not cur_user:
 
            cur_user = getattr(get_current_authuser(), 'username', None)
 
        repo = self._get_repo(repo)
 
        repo = Repository.guess_instance(repo)
 
        if repo is not None:
 
            if forks == 'detach':
 
                for r in repo.forks:
 
                    r.fork = None
 
                    self.sa.add(r)
 
            elif forks == 'delete':
 
                for r in repo.forks:
 
                    self.delete(r, forks='delete')
 
            elif [f for f in repo.forks]:
 
                raise AttachedForksError()
 

	
 
            old_repo_dict = repo.get_dict()
 
            try:
 
                self.sa.delete(repo)
 
                if fs_remove:
 
                    self._delete_filesystem_repo(repo)
 
                else:
 
                    log.debug('skipping removal from filesystem')
 
                log_delete_repository(old_repo_dict,
 
                                      deleted_by=cur_user)
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                raise
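    # Usage sketch for the forks parameter (illustrative only):
    #
    #     RepoModel().delete(repo, forks='detach')   # keep forks but unlink them
    #     RepoModel().delete(repo, forks='delete')   # delete attached forks too
    #     RepoModel().delete(repo)                   # AttachedForksError if forks exist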
 

	
 
    def grant_user_permission(self, repo, user, perm):
 
        """
 
        Grant permission for user on given repository, or update existing one
 
        if found
 

	
 
        :param repo: Instance of Repository, repository_id, or repository name
 
        :param user: Instance of User, user_id or username
 
        :param perm: Instance of Permission, or permission_name
 
        """
 
        user = User.guess_instance(user)
 
        repo = self._get_repo(repo)
 
        repo = Repository.guess_instance(repo)
 
        permission = self._get_perm(perm)
 

	
 
        # check if we have that permission already
 
        obj = self.sa.query(UserRepoToPerm) \
 
            .filter(UserRepoToPerm.user == user) \
 
            .filter(UserRepoToPerm.repository == repo) \
 
            .scalar()
 
        if obj is None:
 
            # create new !
 
            obj = UserRepoToPerm()
 
        obj.repository = repo
 
        obj.user = user
 
        obj.permission = permission
 
        self.sa.add(obj)
 
        log.debug('Granted perm %s to %s on %s', perm, user, repo)
 
        return obj
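    # Usage sketch, mirroring the call made for private repositories in
    # update() above; repo, user and perm may each be an instance, an id or a name:
    #
    #     RepoModel().grant_user_permission(repo='myrepo', user='default',
    #                                       perm='repository.read')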
 

	
 
    def revoke_user_permission(self, repo, user):
 
        """
 
        Revoke permission for user on given repository
 

	
 
        :param repo: Instance of Repository, repository_id, or repository name
 
        :param user: Instance of User, user_id or username
 
        """
 

	
 
        user = User.guess_instance(user)
 
        repo = self._get_repo(repo)
 
        repo = Repository.guess_instance(repo)
 

	
 
        obj = self.sa.query(UserRepoToPerm) \
 
            .filter(UserRepoToPerm.repository == repo) \
 
            .filter(UserRepoToPerm.user == user) \
 
            .scalar()
 
        if obj is not None:
 
            self.sa.delete(obj)
 
            log.debug('Revoked perm on %s on %s', repo, user)
 

	
 
    def grant_user_group_permission(self, repo, group_name, perm):
 
        """
 
        Grant permission for user group on given repository, or update
 
        existing one if found
 

	
 
        :param repo: Instance of Repository, repository_id, or repository name
 
        :param group_name: Instance of UserGroup, users_group_id,
 
            or user group name
 
        :param perm: Instance of Permission, or permission_name
 
        """
 
        repo = self._get_repo(repo)
 
        repo = Repository.guess_instance(repo)
 
        group_name = self._get_user_group(group_name)
 
        permission = self._get_perm(perm)
 

	
 
        # check if we have that permission already
 
        obj = self.sa.query(UserGroupRepoToPerm) \
 
            .filter(UserGroupRepoToPerm.users_group == group_name) \
 
            .filter(UserGroupRepoToPerm.repository == repo) \
 
            .scalar()
 

	
 
        if obj is None:
 
            # create new
 
            obj = UserGroupRepoToPerm()
 

	
 
        obj.repository = repo
 
        obj.users_group = group_name
 
        obj.permission = permission
 
        self.sa.add(obj)
 
        log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
 
        return obj
 

	
 
    def revoke_user_group_permission(self, repo, group_name):
 
        """
 
        Revoke permission for user group on given repository
 

	
 
        :param repo: Instance of Repository, repository_id, or repository name
 
        :param group_name: Instance of UserGroup, users_group_id,
 
            or user group name
 
        """
 
        repo = self._get_repo(repo)
 
        repo = Repository.guess_instance(repo)
 
        group_name = self._get_user_group(group_name)
 

	
 
        obj = self.sa.query(UserGroupRepoToPerm) \
 
            .filter(UserGroupRepoToPerm.repository == repo) \
 
            .filter(UserGroupRepoToPerm.users_group == group_name) \
 
            .scalar()
 
        if obj is not None:
 
            self.sa.delete(obj)
 
            log.debug('Revoked perm to %s on %s', repo, group_name)
 

	
 
    def delete_stats(self, repo_name):
 
        """
 
        removes stats for given repo
 

	
 
        :param repo_name:
 
        """
 
        repo = self._get_repo(repo_name)
 
        repo = Repository.guess_instance(repo_name)
 
        try:
 
            obj = self.sa.query(Statistics) \
 
                .filter(Statistics.repository == repo).scalar()
 
            if obj is not None:
 
                self.sa.delete(obj)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
 
                                clone_uri=None, repo_store_location=None):
 
        """
 
        Creates the repository on the filesystem. The operation is group aware, meaning that it will create
 
        a repository within a group and adjust the paths according to the group location.
 

	
 
        :param repo_name:
 
        :param repo_type:
 
        :param repo_group:
 
        :param clone_uri:
 
        :param repo_store_location:
 
        """
 
        from kallithea.lib.utils import is_valid_repo, is_valid_repo_group
 
        from kallithea.model.scm import ScmModel
 

	
 
        if '/' in repo_name:
 
            raise ValueError('repo_name must not contain groups, got `%s`' % repo_name)
 

	
 
        if isinstance(repo_group, RepoGroup):
 
            new_parent_path = os.sep.join(repo_group.full_path_splitted)
 
        else:
 
            new_parent_path = repo_group or ''
 

	
 
        if repo_store_location:
 
            _paths = [repo_store_location]
 
        else:
 
            _paths = [self.repos_path, new_parent_path, repo_name]
 
            # we need to make it str for mercurial
 
        repo_path = os.path.join(*map(safe_str, _paths))
 

	
 
        # check that this path is not already a repository
 
        if is_valid_repo(repo_path, self.repos_path):
 
            raise Exception('This path %s is already a valid repository' % repo_path)
 

	
 
        # check that this path is not already a repository group
 
        if is_valid_repo_group(repo_path, self.repos_path):
 
            raise Exception('This path %s is already a valid repository group' % repo_path)
 

	
 
        log.info('creating repo %s in %s from url: `%s`',
 
            repo_name, safe_unicode(repo_path),
 
            obfuscate_url_pw(clone_uri))
 

	
 
        backend = get_backend(repo_type)
 

	
 
        if repo_type == 'hg':
 
            baseui = make_ui('db', clear_session=False)
 
            # patch and reset hooks section of UI config to not run any
 
            # hooks on creating remote repo
 
            for k, v in baseui.configitems('hooks'):
 
                baseui.setconfig('hooks', k, None)
 

	
 
            repo = backend(repo_path, create=True, src_url=clone_uri, baseui=baseui)
 
        elif repo_type == 'git':
 
            repo = backend(repo_path, create=True, src_url=clone_uri, bare=True)
 
            # add kallithea hook into this repo
 
            ScmModel().install_git_hooks(repo=repo)
 
        else:
 
            raise Exception('Unsupported repo_type %s, expected hg or git' % repo_type)
 

	
 
        log.debug('Created repo %s with %s backend',
 
                  safe_unicode(repo_name), safe_unicode(repo_type))
 
        return repo
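
# A sketch of how the on-disk path is composed above when no explicit
# repo_store_location is given (paths and names are placeholders): the parent
# group's path is joined between the repository root and the repo name.
import os

repos_path = '/srv/repos'                                # self.repos_path
new_parent_path = os.sep.join(['projects', 'backend'])   # repo_group.full_path_splitted
repo_path = os.path.join(repos_path, new_parent_path, 'myrepo')
# -> '/srv/repos/projects/backend/myrepo'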
 

	
 
    def _rename_filesystem_repo(self, old, new):
 
        """
 
        renames repository on filesystem
 

	
 
        :param old: old name
 
        :param new: new name
 
        """
 
        log.info('renaming repo from %s to %s', old, new)
 

	
 
        old_path = safe_str(os.path.join(self.repos_path, old))
 
        new_path = safe_str(os.path.join(self.repos_path, new))
 
        if os.path.isdir(new_path):
 
            raise Exception(
 
                'Was trying to rename to already existing dir %s' % new_path
 
            )
 
        shutil.move(old_path, new_path)
 

	
 
    def _delete_filesystem_repo(self, repo):
 
        """
 
        Removes the repo from the filesystem. The removal is actually done by
 
        renaming the dir with an 'rm__*' prefix, which Kallithea will skip.
 
        It can be undeleted later by reverting the rename.
 

	
 
        :param repo: repo object
 
        """
 
        rm_path = safe_str(os.path.join(self.repos_path, repo.repo_name))
 
        log.info("Removing %s", rm_path)
 

	
 
        _now = datetime.now()
 
        _ms = str(_now.microsecond).rjust(6, '0')
 
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
 
                             repo.just_name)
 
        if repo.group:
 
            args = repo.group.full_path_splitted + [_d]
 
            _d = os.path.join(*args)
 
        if os.path.exists(rm_path):
 
            shutil.move(rm_path, safe_str(os.path.join(self.repos_path, _d)))
 
        else:
 
            log.error("Can't find repo to delete in %r", rm_path)
kallithea/model/repo_permission.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.repo_permission
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
repository permission model for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Oct 1, 2011
 
:author: nvinot, marcink
 
"""
 

	
 
import logging
 
from kallithea.model.base import BaseModel
 
from kallithea.model.db import User, UserRepoToPerm, UserGroupRepoToPerm, \
 
    Permission
 
    Permission, Repository
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class RepositoryPermissionModel(BaseModel):
 

	
 
    def get_user_permission(self, repository, user):
 
        repository = self._get_repo(repository)
 
        repository = Repository.guess_instance(repository)
 
        user = User.guess_instance(user)
 

	
 
        return UserRepoToPerm.query() \
 
                .filter(UserRepoToPerm.user == user) \
 
                .filter(UserRepoToPerm.repository == repository) \
 
                .scalar()
 

	
 
    def update_user_permission(self, repository, user, permission):
 
        permission = Permission.get_by_key(permission)
 
        current = self.get_user_permission(repository, user)
 
        if current:
 
            if not current.permission is permission:
 
                current.permission = permission
 
        else:
 
            p = UserRepoToPerm()
 
            p.user = user
 
            p.repository = repository
 
            p.permission = permission
 
            self.sa.add(p)
 

	
 
    def delete_user_permission(self, repository, user):
 
        current = self.get_user_permission(repository, user)
 
        if current:
 
            self.sa.delete(current)
 

	
 
    def get_users_group_permission(self, repository, users_group):
 
        return UserGroupRepoToPerm.query() \
 
                .filter(UserGroupRepoToPerm.users_group == users_group) \
 
                .filter(UserGroupRepoToPerm.repository == repository) \
 
                .scalar()
 

	
 
    def update_users_group_permission(self, repository, users_group,
 
                                      permission):
 
        permission = Permission.get_by_key(permission)
 
        current = self.get_users_group_permission(repository, users_group)
 
        if current:
 
            if not current.permission is permission:
 
                current.permission = permission
 
        else:
 
            p = UserGroupRepoToPerm()
 
            p.users_group = users_group
 
            p.repository = repository
 
            p.permission = permission
 
            self.sa.add(p)
 

	
 
    def delete_users_group_permission(self, repository, users_group):
 
        current = self.get_users_group_permission(repository, users_group)
 
        if current:
 
            self.sa.delete(current)
 

	
 
    def update_or_delete_user_permission(self, repository, user, permission):
 
        if permission:
 
            self.update_user_permission(repository, user, permission)
 
        else:
 
            self.delete_user_permission(repository, user)
 

	
 
    def update_or_delete_users_group_permission(self, repository, user_group,
 
                                              permission):
 
        if permission:
 
            self.update_users_group_permission(repository, user_group,
 
                                               permission)
 
        else:
 
            self.delete_users_group_permission(repository, user_group)
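
# A minimal usage sketch for RepositoryPermissionModel (placeholder repo and
# user; a configured session is assumed). Passing a permission name creates or
# updates the grant; passing None removes it.
from kallithea.model.db import Repository, User
from kallithea.model.meta import Session
from kallithea.model.repo_permission import RepositoryPermissionModel

repo = Repository.get_by_repo_name('mygroup/myrepo')
user = User.get_by_username('john')
RepositoryPermissionModel().update_or_delete_user_permission(repo, user,
                                                             'repository.read')
Session().commit()
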
kallithea/model/scm.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.scm
 
~~~~~~~~~~~~~~~~~~~
 

	
 
Scm model for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 9, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import os
 
import sys
 
import posixpath
 
import re
 
import time
 
import traceback
 
import logging
 
import cStringIO
 
import pkg_resources
 

	
 
from sqlalchemy import func
 
from pylons.i18n.translation import _
 

	
 
import kallithea
 
from kallithea.lib.vcs import get_backend
 
from kallithea.lib.vcs.exceptions import RepositoryError
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 

	
 
from kallithea import BACKENDS
 
from kallithea.lib import helpers as h
 
from kallithea.lib.utils2 import safe_str, safe_unicode, get_server_url, \
 
    _set_extras
 
from kallithea.lib.auth import HasRepoPermissionAny, HasRepoGroupPermissionAny, \
 
    HasUserGroupPermissionAny, HasPermissionAny
 
from kallithea.lib.utils import get_filesystem_repos, make_ui, \
 
    action_logger
 
from kallithea.model.base import BaseModel
 
from kallithea.model.db import Repository, Ui, CacheInvalidation, \
 
    UserFollowing, UserLog, User, RepoGroup, PullRequest
 
from kallithea.lib.hooks import log_push_action
 
from kallithea.lib.exceptions import NonRelativePathError, IMCCommitError
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class UserTemp(object):
 
    def __init__(self, user_id):
 
        self.user_id = user_id
 

	
 
    def __repr__(self):
 
        return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
 

	
 

	
 
class RepoTemp(object):
 
    def __init__(self, repo_id):
 
        self.repo_id = repo_id
 

	
 
    def __repr__(self):
 
        return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
 

	
 

	
 
class _PermCheckIterator(object):
 
    def __init__(self, obj_list, obj_attr, perm_set, perm_checker, extra_kwargs=None):
 
        """
 
        Creates an iterator over the given list of objects, additionally
 
        checking the permissions from perm_set for each of them
 

	
 
        :param obj_list: list of db objects
 
        :param obj_attr: attribute of object to pass into perm_checker
 
        :param perm_set: list of permissions to check
 
        :param perm_checker: callable to check permissions against
 
        """
 
        self.obj_list = obj_list
 
        self.obj_attr = obj_attr
 
        self.perm_set = perm_set
 
        self.perm_checker = perm_checker
 
        self.extra_kwargs = extra_kwargs or {}
 

	
 
    def __len__(self):
 
        return len(self.obj_list)
 

	
 
    def __repr__(self):
 
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
 

	
 
    def __iter__(self):
 
        for db_obj in self.obj_list:
 
            # check permission at this level
 
            name = getattr(db_obj, self.obj_attr, None)
 
            if not self.perm_checker(*self.perm_set)(
 
                    name, self.__class__.__name__, **self.extra_kwargs):
 
                continue
 

	
 
            yield db_obj
 

	
 

	
 
class RepoList(_PermCheckIterator):
 

	
 
    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
 
        if not perm_set:
 
            perm_set = ['repository.read', 'repository.write', 'repository.admin']
 

	
 
        super(RepoList, self).__init__(obj_list=db_repo_list,
 
                    obj_attr='repo_name', perm_set=perm_set,
 
                    perm_checker=HasRepoPermissionAny,
 
                    extra_kwargs=extra_kwargs)
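
# A sketch of how the permission-checking iterators are used (an authenticated
# request context is assumed, since HasRepoPermissionAny reads the current
# user): only repositories the user can at least read are yielded.
from kallithea.model.db import Repository
from kallithea.model.scm import RepoList

readable = [repo.repo_name for repo in RepoList(Repository.query().all())]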
 

	
 

	
 
class RepoGroupList(_PermCheckIterator):
 

	
 
    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
 
        if not perm_set:
 
            perm_set = ['group.read', 'group.write', 'group.admin']
 

	
 
        super(RepoGroupList, self).__init__(obj_list=db_repo_group_list,
 
                    obj_attr='group_name', perm_set=perm_set,
 
                    perm_checker=HasRepoGroupPermissionAny,
 
                    extra_kwargs=extra_kwargs)
 

	
 

	
 
class UserGroupList(_PermCheckIterator):
 

	
 
    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
 
        if not perm_set:
 
            perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
 

	
 
        super(UserGroupList, self).__init__(obj_list=db_user_group_list,
 
                    obj_attr='users_group_name', perm_set=perm_set,
 
                    perm_checker=HasUserGroupPermissionAny,
 
                    extra_kwargs=extra_kwargs)
 

	
 

	
 
class ScmModel(BaseModel):
 
    """
 
    Generic Scm Model
 
    """
 

	
 
    def __get_repo(self, instance):
 
        cls = Repository
 
        if isinstance(instance, cls):
 
            return instance
 
        elif isinstance(instance, int) or safe_str(instance).isdigit():
 
            return cls.get(instance)
 
        elif isinstance(instance, basestring):
 
            return cls.get_by_repo_name(instance)
 
        elif instance is not None:
 
            raise Exception('given object must be int, basestring or instance'
 
                            ' of %s, got %s' % (type(cls), type(instance)))
 

	
 
    @LazyProperty
 
    def repos_path(self):
 
        """
 
        Gets the repositories root path from database
 
        """
 

	
 
        q = self.sa.query(Ui).filter(Ui.ui_key == '/').one()
 

	
 
        return q.ui_value
 

	
 
    def repo_scan(self, repos_path=None):
 
        """
 
        Lists repositories in the given path. This path should not be a
 
        repository itself. Returns a dictionary of repository objects.
 

	
 
        :param repos_path: path to directory containing repositories
 
        """
 

	
 
        if repos_path is None:
 
            repos_path = self.repos_path
 

	
 
        log.info('scanning for repositories in %s', repos_path)
 

	
 
        baseui = make_ui('db')
 
        repos = {}
 

	
 
        for name, path in get_filesystem_repos(repos_path):
 
            # the name needs to be decomposed and put back together using '/',
 
            # since this is the internal storage separator for kallithea
 
            name = Repository.normalize_repo_name(name)
 

	
 
            try:
 
                if name in repos:
 
                    raise RepositoryError('Duplicate repository name %s '
 
                                          'found in %s' % (name, path))
 
                else:
 

	
 
                    klass = get_backend(path[0])
 

	
 
                    if path[0] == 'hg' and path[0] in BACKENDS.keys():
 
                        repos[name] = klass(safe_str(path[1]), baseui=baseui)
 

	
 
                    if path[0] == 'git' and path[0] in BACKENDS.keys():
 
                        repos[name] = klass(path[1])
 
            except OSError:
 
                continue
 
        log.debug('found %s paths with repositories', len(repos))
 
        return repos
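
# A small usage sketch for repo_scan (uses the configured repository root):
# the result maps normalized repo names to instantiated vcs backend objects.
from kallithea.model.scm import ScmModel

scanned = ScmModel().repo_scan()
repo_types = dict((name, vcs_repo.alias) for name, vcs_repo in scanned.items())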
 

	
 
    def get_repos(self, repos):
 
        """Return the repos the user has access to"""
 
        return RepoList(repos)
 

	
 
    def get_repo_groups(self, groups=None):
 
        """Return the repo groups the user has access to
 
        If no groups are specified, use top level groups.
 
        """
 
        if groups is None:
 
            groups = RepoGroup.query() \
 
                .filter(RepoGroup.parent_group_id == None).all()
 
        return RepoGroupList(groups)
 

	
 
    def mark_for_invalidation(self, repo_name):
 
        """
 
        Mark caches of this repo invalid in the database.
 

	
 
        :param repo_name: the repo for which caches should be marked invalid
 
        """
 
        CacheInvalidation.set_invalidate(repo_name)
 
        repo = Repository.get_by_repo_name(repo_name)
 
        if repo is not None:
 
            repo.update_changeset_cache()
 

	
 
    def toggle_following_repo(self, follow_repo_id, user_id):
 

	
 
        f = self.sa.query(UserFollowing) \
 
            .filter(UserFollowing.follows_repository_id == follow_repo_id) \
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        if f is not None:
 
            try:
 
                self.sa.delete(f)
 
                action_logger(UserTemp(user_id),
 
                              'stopped_following_repo',
 
                              RepoTemp(follow_repo_id))
 
                return
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                raise
 

	
 
        try:
 
            f = UserFollowing()
 
            f.user_id = user_id
 
            f.follows_repository_id = follow_repo_id
 
            self.sa.add(f)
 

	
 
            action_logger(UserTemp(user_id),
 
                          'started_following_repo',
 
                          RepoTemp(follow_repo_id))
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def toggle_following_user(self, follow_user_id, user_id):
 
        f = self.sa.query(UserFollowing) \
 
            .filter(UserFollowing.follows_user_id == follow_user_id) \
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        if f is not None:
 
            try:
 
                self.sa.delete(f)
 
                return
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                raise
 

	
 
        try:
 
            f = UserFollowing()
 
            f.user_id = user_id
 
            f.follows_user_id = follow_user_id
 
            self.sa.add(f)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def is_following_repo(self, repo_name, user_id, cache=False):
 
        r = self.sa.query(Repository) \
 
            .filter(Repository.repo_name == repo_name).scalar()
 

	
 
        f = self.sa.query(UserFollowing) \
 
            .filter(UserFollowing.follows_repository == r) \
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        return f is not None
 

	
 
    def is_following_user(self, username, user_id, cache=False):
 
        u = User.get_by_username(username)
 

	
 
        f = self.sa.query(UserFollowing) \
 
            .filter(UserFollowing.follows_user == u) \
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        return f is not None
 

	
 
    def get_followers(self, repo):
 
        repo = self._get_repo(repo)
 
        repo = Repository.guess_instance(repo)
 

	
 
        return self.sa.query(UserFollowing) \
 
                .filter(UserFollowing.follows_repository == repo).count()
 

	
 
    def get_forks(self, repo):
 
        repo = self._get_repo(repo)
 
        repo = Repository.guess_instance(repo)
 
        return self.sa.query(Repository) \
 
                .filter(Repository.fork == repo).count()
 

	
 
    def get_pull_requests(self, repo):
 
        repo = self._get_repo(repo)
 
        repo = Repository.guess_instance(repo)
 
        return self.sa.query(PullRequest) \
 
                .filter(PullRequest.other_repo == repo) \
 
                .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
 

	
 
    def mark_as_fork(self, repo, fork, user):
 
        repo = self.__get_repo(repo)
 
        fork = self.__get_repo(fork)
 
        if fork and repo.repo_id == fork.repo_id:
 
            raise Exception("Cannot set repository as fork of itself")
 

	
 
        if fork and repo.repo_type != fork.repo_type:
 
            raise RepositoryError("Cannot set repository as fork of repository with other type")
 

	
 
        repo.fork = fork
 
        self.sa.add(repo)
 
        return repo
 

	
 
    def _handle_rc_scm_extras(self, username, repo_name, repo_alias,
 
                              action=None):
 
        from kallithea import CONFIG
 
        from kallithea.lib.base import _get_ip_addr
 
        try:
 
            from pylons import request
 
            environ = request.environ
 
        except TypeError:
 
            # we might use this outside of request context, let's fake the
 
            # environ data
 
            from webob import Request
 
            environ = Request.blank('').environ
 
        extras = {
 
            'ip': _get_ip_addr(environ),
 
            'username': username,
 
            'action': action or 'push_local',
 
            'repository': repo_name,
 
            'scm': repo_alias,
 
            'config': CONFIG['__file__'],
 
            'server_url': get_server_url(environ),
 
            'make_lock': None,
 
            'locked_by': [None, None]
 
        }
 
        _set_extras(extras)
 

	
 
    def _handle_push(self, repo, username, action, repo_name, revisions):
 
        """
 
        Triggers push action hooks
 

	
 
        :param repo: SCM repo
 
        :param username: username who pushes
 
        :param action: push/push_local/push_remote
 
        :param repo_name: name of repo
 
        :param revisions: list of revisions that we pushed
 
        """
 
        self._handle_rc_scm_extras(username, repo_name, repo_alias=repo.alias)
 
        _scm_repo = repo._repo
 
        # trigger push hook
 
        if repo.alias == 'hg':
 
            log_push_action(_scm_repo.ui, _scm_repo, node=revisions[0])
 
        elif repo.alias == 'git':
 
            log_push_action(None, _scm_repo, _git_revs=revisions)
 

	
 
    def _get_IMC_module(self, scm_type):
 
        """
 
        Returns InMemoryCommit class based on scm_type
 

	
 
        :param scm_type:
 
        """
 
        if scm_type == 'hg':
 
            from kallithea.lib.vcs.backends.hg import MercurialInMemoryChangeset
 
            return MercurialInMemoryChangeset
 

	
 
        if scm_type == 'git':
 
            from kallithea.lib.vcs.backends.git import GitInMemoryChangeset
 
            return GitInMemoryChangeset
 

	
 
        raise Exception('Invalid scm_type, must be one of hg or git, got %s'
 
                        % (scm_type,))
 

	
 
    def pull_changes(self, repo, username):
 
        """
 
        Pull from "clone URL".
 
        """
 
        dbrepo = self.__get_repo(repo)
 
        clone_uri = dbrepo.clone_uri
 
        if not clone_uri:
 
            raise Exception("This repository doesn't have a clone uri")
 

	
 
        repo = dbrepo.scm_instance
 
        repo_name = dbrepo.repo_name
 
        try:
 
            if repo.alias == 'git':
 
                repo.fetch(clone_uri)
 
                # git doesn't really have something like a post-fetch action,
 
                # so we fake it here. TODO: somehow extract the fetched
 
                # revisions
 
                self._handle_push(repo,
 
                                  username=username,
 
                                  action='push_remote',
 
                                  repo_name=repo_name,
 
                                  revisions=[])
 
            else:
 
                self._handle_rc_scm_extras(username, dbrepo.repo_name,
 
                                           repo.alias, action='push_remote')
 
                repo.pull(clone_uri)
 

	
 
            self.mark_for_invalidation(repo_name)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def commit_change(self, repo, repo_name, cs, user, author, message,
 
                      content, f_path):
 
        """
 
        Commit a change to a single file
 

	
 
        :param repo: a db_repo.scm_instance
 
        """
 
        user = User.guess_instance(user)
 
        IMC = self._get_IMC_module(repo.alias)
 

	
 
        # decoding here ensures that we have properly encoded values;
 
        # in any other case this will throw exceptions and deny the commit
 
        content = safe_str(content)
 
        path = safe_str(f_path)
 
        # message and author need to be unicode;
 
        # the backend should then translate them into the required type
 
        message = safe_unicode(message)
 
        author = safe_unicode(author)
 
        imc = IMC(repo)
 
        imc.change(FileNode(path, content, mode=cs.get_file_mode(f_path)))
 
        try:
 
            tip = imc.commit(message=message, author=author,
 
                             parents=[cs], branch=cs.branch)
 
        except Exception as e:
 
            log.error(traceback.format_exc())
 
            raise IMCCommitError(str(e))
 
        finally:
 
            # always clear caches, if commit fails we want fresh object also
 
            self.mark_for_invalidation(repo_name)
 
        self._handle_push(repo,
 
                          username=user.username,
 
                          action='push_local',
 
                          repo_name=repo_name,
 
                          revisions=[tip.raw_id])
 
        return tip
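
# A minimal sketch of committing a single-file change through commit_change
# (repo, user and file names are placeholders; the file is assumed to exist
# in the parent changeset):
from kallithea.model.db import Repository, User
from kallithea.model.scm import ScmModel

db_repo = Repository.get_by_repo_name('mygroup/myrepo')
cs = db_repo.scm_instance.get_changeset()  # current tip
tip = ScmModel().commit_change(repo=db_repo.scm_instance,
                               repo_name=db_repo.repo_name,
                               cs=cs,
                               user=User.get_by_username('john'),
                               author=u'John Doe <john@example.com>',
                               message=u'update README',
                               content='new content\n',
                               f_path='README.rst')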
 

	
 
    def _sanitize_path(self, f_path):
 
        if f_path.startswith('/') or f_path.startswith('.') or '../' in f_path:
 
            raise NonRelativePathError('%s is not a relative path' % f_path)
 
        if f_path:
 
            f_path = posixpath.normpath(f_path)
 
        return f_path
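
# A small illustration of _sanitize_path's contract (example values):
# relative paths are normalized, while absolute paths, paths starting with '.'
# and paths containing '../' raise NonRelativePathError.
from kallithea.lib.exceptions import NonRelativePathError
from kallithea.model.scm import ScmModel

scm = ScmModel()
assert scm._sanitize_path('docs//index.rst') == 'docs/index.rst'
try:
    scm._sanitize_path('../etc/passwd')
except NonRelativePathError:
    pass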
 

	
 
    def get_nodes(self, repo_name, revision, root_path='/', flat=True):
 
        """
 
        Recursively walk the root dir and return all directory and file paths found.
 

	
 
        :param repo_name: name of repository
 
        :param revision: revision for which to list nodes
 
        :param root_path: root path to list
 
        :param flat: if True return plain paths; if False return dicts with name and type
 

	
 
        """
 
        _files = list()
 
        _dirs = list()
 
        try:
 
            _repo = self.__get_repo(repo_name)
 
            changeset = _repo.scm_instance.get_changeset(revision)
 
            root_path = root_path.lstrip('/')
 
            for topnode, dirs, files in changeset.walk(root_path):
 
                for f in files:
 
                    _files.append(f.path if flat else {"name": f.path,
 
                                                       "type": "file"})
 
                for d in dirs:
 
                    _dirs.append(d.path if flat else {"name": d.path,
 
                                                      "type": "dir"})
 
        except RepositoryError:
 
            log.debug(traceback.format_exc())
 
            raise
 

	
 
        return _dirs, _files
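
# A usage sketch for get_nodes (repo name and revision are placeholders):
from kallithea.model.scm import ScmModel

dirs, files = ScmModel().get_nodes('mygroup/myrepo', 'tip', root_path='docs/',
                                   flat=False)
# with flat=False, entries are dicts such as {"name": "docs/index.rst", "type": "file"}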
 

	
 
    def create_nodes(self, user, repo, message, nodes, parent_cs=None,
 
                     author=None, trigger_push_hook=True):
 
        """
 
        Commits specified nodes to repo.
 

	
 
        :param user: Kallithea User object or user_id, the committer
 
        :param repo: Kallithea Repository object
 
        :param message: commit message
 
        :param nodes: mapping {filename:{'content':content},...}
 
        :param parent_cs: parent changeset; if empty, this is the initial commit
 
        :param author: author of the commit; can be different from the committer (git only)
 
        :param trigger_push_hook: trigger push hooks
 

	
 
        :returns: new committed changeset
 
        """
 

	
 
        user = User.guess_instance(user)
 
        scm_instance = repo.scm_instance_no_cache()
 

	
 
        processed_nodes = []
 
        for f_path in nodes:
 
            content = nodes[f_path]['content']
 
            f_path = self._sanitize_path(f_path)
 
            f_path = safe_str(f_path)
 
            # decoding here ensures that we have properly encoded values;
 
            # in any other case this will throw exceptions and deny the commit
 
            if isinstance(content, (basestring,)):
 
                content = safe_str(content)
 
            elif isinstance(content, (file, cStringIO.OutputType,)):
 
                content = content.read()
 
            else:
 
                raise Exception('Content is of unrecognized type %s' % (
 
                    type(content)
 
                ))
 
            processed_nodes.append((f_path, content))
 

	
 
        message = safe_unicode(message)
 
        committer = user.full_contact
 
        author = safe_unicode(author) if author else committer
 

	
 
        IMC = self._get_IMC_module(scm_instance.alias)
 
        imc = IMC(scm_instance)
 

	
 
        if not parent_cs:
 
            parent_cs = EmptyChangeset(alias=scm_instance.alias)
 

	
 
        if isinstance(parent_cs, EmptyChangeset):
 
            # EmptyChangeset means we're editing an empty repository
 
            parents = None
 
        else:
 
            parents = [parent_cs]
 
        # add multiple nodes
 
        for path, content in processed_nodes:
 
            imc.add(FileNode(path, content=content))
 

	
 
        tip = imc.commit(message=message,
 
                         author=author,
 
                         parents=parents,
 
                         branch=parent_cs.branch)
 

	
 
        self.mark_for_invalidation(repo.repo_name)
 
        if trigger_push_hook:
 
            self._handle_push(scm_instance,
 
                              username=user.username,
 
                              action='push_local',
 
                              repo_name=repo.repo_name,
 
                              revisions=[tip.raw_id])
 
        return tip
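
# A minimal sketch of the nodes mapping expected by create_nodes (repo, user
# and file names are placeholders; a configured session is assumed):
from kallithea.model.db import Repository, User
from kallithea.model.scm import ScmModel

db_repo = Repository.get_by_repo_name('mygroup/myrepo')
parent = db_repo.scm_instance.get_changeset()  # current tip; omit for an empty repo
tip = ScmModel().create_nodes(user=User.get_by_username('john'),
                              repo=db_repo,
                              message=u'add docs',
                              nodes={'docs/index.rst': {'content': 'Hello\n'}},
                              parent_cs=parent)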
 

	
 
    def update_nodes(self, user, repo, message, nodes, parent_cs=None,
 
                     author=None, trigger_push_hook=True):
 
        """
 
        Updates the specified nodes and commits the changes to the repo.
 
        """
 
        user = User.guess_instance(user)
 
        scm_instance = repo.scm_instance_no_cache()
 

	
 
        message = safe_unicode(message)
 
        committer = user.full_contact
 
        author = safe_unicode(author) if author else committer
 

	
 
        imc_class = self._get_IMC_module(scm_instance.alias)
 
        imc = imc_class(scm_instance)
 

	
 
        if not parent_cs:
 
            parent_cs = EmptyChangeset(alias=scm_instance.alias)
 

	
 
        if isinstance(parent_cs, EmptyChangeset):
 
            # EmptyChangeset means we're editing an empty repository
 
            parents = None
 
        else:
 
            parents = [parent_cs]
 

	
 
        # add multiple nodes
 
        for _filename, data in nodes.items():
 
            # the new filename, which may be a rename of the old one
 
            filename = self._sanitize_path(data['filename'])
 
            old_filename = self._sanitize_path(_filename)
 
            content = data['content']
 

	
 
            filenode = FileNode(old_filename, content=content)
 
            op = data['op']
 
            if op == 'add':
 
                imc.add(filenode)
 
            elif op == 'del':
 
                imc.remove(filenode)
 
            elif op == 'mod':
 
                if filename != old_filename:
 
                    #TODO: handle renames, needs vcs lib changes
 
                    imc.remove(filenode)
 
                    imc.add(FileNode(filename, content=content))
 
                else:
 
                    imc.change(filenode)
 

	
 
        # commit changes
 
        tip = imc.commit(message=message,
 
                         author=author,
 
                         parents=parents,
 
                         branch=parent_cs.branch)
 

	
 
        self.mark_for_invalidation(repo.repo_name)
 
        if trigger_push_hook:
 
            self._handle_push(scm_instance,
 
                              username=user.username,
 
                              action='push_local',
 
                              repo_name=repo.repo_name,
 
                              revisions=[tip.raw_id])
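
# A sketch of the nodes mapping expected by update_nodes: keys are the old
# filenames, 'filename' is the (possibly renamed) target and 'op' is one of
# 'add', 'mod' or 'del' (file names and contents are placeholders):
nodes = {
    'docs/index.rst': {'filename': 'docs/index.rst',
                       'content': 'updated\n',
                       'op': 'mod'},
    'old-name.txt': {'filename': 'new-name.txt',
                     'content': 'same content\n',
                     'op': 'mod'},  # rename, handled above as remove + add
}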
 

	
 
    def delete_nodes(self, user, repo, message, nodes, parent_cs=None,
 
                     author=None, trigger_push_hook=True):
 
        """
 
        Deletes specified nodes from repo.
 

	
 
        :param user: Kallithea User object or user_id, the committer
 
        :param repo: Kallithea Repository object
 
        :param message: commit message
 
        :param nodes: mapping {filename:{'content':content},...}
 
        :param parent_cs: parent changeset; if empty, this is the initial commit
 
        :param author: author of the commit; can be different from the committer (git only)
 
        :param trigger_push_hook: trigger push hooks
 

	
 
        :returns: new committed changeset after deletion
 
        """
 

	
 
        user = User.guess_instance(user)
 
        scm_instance = repo.scm_instance_no_cache()
 

	
 
        processed_nodes = []
 
        for f_path in nodes:
 
            f_path = self._sanitize_path(f_path)
 
            # content can be empty, but for compatibility it accepts the same
 
            # dict structure as add_nodes
 
            content = nodes[f_path].get('content')
 
            processed_nodes.append((f_path, content))
 

	
 
        message = safe_unicode(message)
 
        committer = user.full_contact
 
        author = safe_unicode(author) if author else committer
 

	
 
        IMC = self._get_IMC_module(scm_instance.alias)
 
        imc = IMC(scm_instance)
 

	
 
        if not parent_cs:
 
            parent_cs = EmptyChangeset(alias=scm_instance.alias)
 

	
 
        if isinstance(parent_cs, EmptyChangeset):
 
            # EmptyChangeset means we're editing an empty repository
 
            parents = None
 
        else:
 
            parents = [parent_cs]
 
        # add multiple nodes
 
        for path, content in processed_nodes:
 
            imc.remove(FileNode(path, content=content))
 

	
 
        tip = imc.commit(message=message,
 
                         author=author,
 
                         parents=parents,
 
                         branch=parent_cs.branch)
 

	
 
        self.mark_for_invalidation(repo.repo_name)
 
        if trigger_push_hook:
 
            self._handle_push(scm_instance,
 
                              username=user.username,
 
                              action='push_local',
 
                              repo_name=repo.repo_name,
 
                              revisions=[tip.raw_id])
 
        return tip
 

	
 
    def get_unread_journal(self):
 
        return self.sa.query(UserLog).count()
 

	
 
    def get_repo_landing_revs(self, repo=None):
 
        """
 
        Generates select options with tags, branches and bookmarks (for hg only),
 
        grouped by type.
 

	
 
        :param repo:
 
        """
 

	
 
        hist_l = []
 
        choices = []
 
        repo = self.__get_repo(repo)
 
        hist_l.append(['rev:tip', _('latest tip')])
 
        choices.append('rev:tip')
 
        if repo is None:
 
            return choices, hist_l
 

	
 
        repo = repo.scm_instance
 

	
 
        branches_group = ([(u'branch:%s' % k, k) for k, v in
 
                           repo.branches.iteritems()], _("Branches"))
 
        hist_l.append(branches_group)
 
        choices.extend([x[0] for x in branches_group[0]])
 

	
 
        if repo.alias == 'hg':
 
            bookmarks_group = ([(u'book:%s' % k, k) for k, v in
 
                                repo.bookmarks.iteritems()], _("Bookmarks"))
 
            hist_l.append(bookmarks_group)
 
            choices.extend([x[0] for x in bookmarks_group[0]])
 

	
 
        tags_group = ([(u'tag:%s' % k, k) for k, v in
 
                       repo.tags.iteritems()], _("Tags"))
 
        hist_l.append(tags_group)
 
        choices.extend([x[0] for x in tags_group[0]])
 

	
 
        return choices, hist_l
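
# A sketch of the shapes returned by get_repo_landing_revs (example data for a
# Mercurial repo; the repo name is a placeholder): choices is a flat list of
# keys, hist_l groups them for the select widget.
from kallithea.model.scm import ScmModel

choices, hist_l = ScmModel().get_repo_landing_revs('mygroup/myrepo')
# choices -> ['rev:tip', u'branch:default', u'tag:v1.0', ...]
# hist_l  -> [['rev:tip', u'latest tip'],
#             ([(u'branch:default', u'default')], u'Branches'),
#             ([(u'book:stable', u'stable')], u'Bookmarks'),
#             ([(u'tag:v1.0', u'v1.0')], u'Tags')]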
 

	
 
    def install_git_hooks(self, repo, force_create=False):
 
        """
 
        Creates a kallithea hook inside a git repository
 

	
 
        :param repo: Instance of VCS repo
 
        :param force_create: Create even if same name hook exists
 
        """
 

	
 
        loc = os.path.join(repo.path, 'hooks')
 
        if not repo.bare:
 
            loc = os.path.join(repo.path, '.git', 'hooks')
 
        if not os.path.isdir(loc):
 
            os.makedirs(loc)
 

	
 
        tmpl_post = "#!/usr/bin/env %s\n" % sys.executable or 'python2'
 
        tmpl_post += pkg_resources.resource_string(
 
            'kallithea', os.path.join('config', 'post_receive_tmpl.py')
 
        )
 
        tmpl_pre = "#!/usr/bin/env %s\n" % sys.executable or 'python2'
 
        tmpl_pre += pkg_resources.resource_string(
 
            'kallithea', os.path.join('config', 'pre_receive_tmpl.py')
 
        )
 

	
 
        for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
 
            _hook_file = os.path.join(loc, '%s-receive' % h_type)
 
            has_hook = False
 
            log.debug('Installing git hook in repo %s', repo)
 
            if os.path.exists(_hook_file):
 
                # let's take a look at this hook, maybe it's from kallithea?
 
                log.debug('hook exists, checking if it is from kallithea')
 
                with open(_hook_file, 'rb') as f:
 
                    data = f.read()
 
                    matches = re.compile(r'(?:%s)\s*=\s*(.*)'
 
                                         % 'KALLITHEA_HOOK_VER').search(data)
 
                    if matches:
 
                        try:
 
                            ver = matches.groups()[0]
 
                            log.debug('got %s it is kallithea', ver)
 
                            has_hook = True
 
                        except Exception:
 
                            log.error(traceback.format_exc())
 
            else:
 
                # there is no hook in this dir, so we want to create one
 
                has_hook = True
 

	
 
            if has_hook or force_create:
 
                log.debug('writing %s hook file!', h_type)
 
                try:
 
                    with open(_hook_file, 'wb') as f:
 
                        tmpl = tmpl.replace('_TMPL_', kallithea.__version__)
 
                        f.write(tmpl)
 
                    os.chmod(_hook_file, 0755)
 
                except IOError as e:
 
                    log.error('error writing %s: %s', _hook_file, e)
 
            else:
 
                log.debug('skipping writing hook file')
 

	
 
def AvailableRepoGroupChoices(top_perms, repo_group_perms, extras=()):
 
    """Return group_id,string tuples with choices for all the repo groups where
 
    the user has the necessary permissions.
 

	
 
    Top level is -1.
 
    """
 
    groups = RepoGroup.query().all()
 
    if HasPermissionAny('hg.admin')('available repo groups'):
 
        groups.append(None)
 
    else:
 
        groups = list(RepoGroupList(groups, perm_set=repo_group_perms))
 
        if top_perms and HasPermissionAny(*top_perms)('available repo groups'):
 
            groups.append(None)
 
        for extra in extras:
 
            if not any(rg == extra for rg in groups):
 
                groups.append(extra)
 
    return RepoGroup.groups_choices(groups=groups)