Changeset - e85f08375dc6
[Not reviewed]
default
Mads Kiilerich - 2017-10-03 00:14:40
mads@kiilerich.com
diffs: drop the LimitedDiffContainer - just make it a flag available as a property

Keep it simple.
5 files changed with 10 insertions and 40 deletions:
0 comments (0 inline, 0 general)
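
The change itself is mechanical: callers stop type-checking the parsed result against LimitedDiffContainer and instead read a boolean flag that DiffProcessor maintains while parsing. A condensed before/after sketch of the call-site pattern (names taken from the controllers below; not an exact hunk):

    # Before: truncation was signalled by wrapping .parsed in LimitedDiffContainer
    diff_processor = diffs.DiffProcessor(raw_diff, diff_limit=diff_limit)
    c.limited_diff = isinstance(diff_processor.parsed, LimitedDiffContainer)

    # After: DiffProcessor records the truncation itself; callers just read the flag
    diff_processor = diffs.DiffProcessor(raw_diff, diff_limit=diff_limit)
    c.limited_diff = diff_processor.limited_diff
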
kallithea/controllers/changeset.py
 
@@ -3,97 +3,96 @@
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.changeset
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
changeset controller showing changes between revisions
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 25, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 
import traceback
 
from collections import defaultdict
 

	
 
from tg import tmpl_context as c, request, response
 
from tg.i18n import ugettext as _
 
from webob.exc import HTTPFound, HTTPForbidden, HTTPBadRequest, HTTPNotFound
 

	
 
from kallithea.lib.vcs.exceptions import RepositoryError, \
 
    ChangesetDoesNotExistError, EmptyRepositoryError
 

	
 
import kallithea.lib.helpers as h
 
from kallithea.lib.auth import LoginRequired, HasRepoPermissionLevelDecorator, \
 
    NotAnonymous
 
from kallithea.lib.base import BaseRepoController, render, jsonify
 
from kallithea.lib.utils import action_logger
 
from kallithea.lib.compat import OrderedDict
 
from kallithea.lib import diffs
 
from kallithea.model.db import ChangesetComment, ChangesetStatus
 
from kallithea.model.comment import ChangesetCommentsModel
 
from kallithea.model.changeset_status import ChangesetStatusModel
 
from kallithea.model.meta import Session
 
from kallithea.model.repo import RepoModel
 
from kallithea.lib.diffs import LimitedDiffContainer
 
from kallithea.lib.exceptions import StatusChangeOnClosedPullRequestError
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 
from kallithea.lib.utils2 import safe_unicode
 
from kallithea.lib.graphmod import graph_data
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def _update_with_GET(params, GET):
 
    for k in ['diff1', 'diff2', 'diff']:
 
        params[k] += GET.getall(k)
 

	
 

	
 
def anchor_url(revision, path, GET):
 
    fid = h.FID(revision, path)
 
    return h.url.current(anchor=fid, **dict(GET))
 

	
 

	
 
def get_ignore_ws(fid, GET):
 
    ig_ws_global = GET.get('ignorews')
 
    ig_ws = filter(lambda k: k.startswith('WS'), GET.getall(fid))
 
    if ig_ws:
 
        try:
 
            return int(ig_ws[0].split(':')[-1])
 
        except ValueError:
 
            raise HTTPBadRequest()
 
    return ig_ws_global
 

	
 

	
 
def _ignorews_url(GET, fileid=None):
 
    fileid = str(fileid) if fileid else None
 
    params = defaultdict(list)
 
    _update_with_GET(params, GET)
 
    lbl = _('Show whitespace')
 
    ig_ws = get_ignore_ws(fileid, GET)
 
    ln_ctx = get_line_ctx(fileid, GET)
 
    # global option
 
    if fileid is None:
 
        if ig_ws is None:
 
            params['ignorews'] += [1]
 
            lbl = _('Ignore whitespace')
 
        ctx_key = 'context'
 
        ctx_val = ln_ctx
 
    # per file options
 
    else:
 
        if ig_ws is None:
 
            params[fileid] += ['WS:1']
 
            lbl = _('Ignore whitespace')
 
@@ -234,99 +233,97 @@ class ChangesetController(BaseRepoContro
 
        c.changes = OrderedDict()
 

	
 
        c.lines_added = 0  # count of lines added
 
        c.lines_deleted = 0  # count of lines removed
 

	
 
        c.changeset_statuses = ChangesetStatus.STATUSES
 
        comments = dict()
 
        c.statuses = []
 
        c.inline_comments = []
 
        c.inline_cnt = 0
 

	
 
        # Iterate over ranges (default changeset view is always one changeset)
 
        for changeset in c.cs_ranges:
 
            if method == 'show':
 
                c.statuses.extend([ChangesetStatusModel().get_status(
 
                            c.db_repo.repo_id, changeset.raw_id)])
 

	
 
                # Changeset comments
 
                comments.update((com.comment_id, com)
 
                                for com in ChangesetCommentsModel()
 
                                .get_comments(c.db_repo.repo_id,
 
                                              revision=changeset.raw_id))
 

	
 
                # Status change comments - mostly from pull requests
 
                comments.update((st.comment_id, st.comment)
 
                                for st in ChangesetStatusModel()
 
                                .get_statuses(c.db_repo.repo_id,
 
                                              changeset.raw_id, with_revisions=True)
 
                                if st.comment_id is not None)
 

	
 
                inlines = ChangesetCommentsModel() \
 
                            .get_inline_comments(c.db_repo.repo_id,
 
                                                 revision=changeset.raw_id)
 
                c.inline_comments.extend(inlines)
 

	
 
            cs2 = changeset.raw_id
 
            cs1 = changeset.parents[0].raw_id if changeset.parents else EmptyChangeset().raw_id
 
            context_lcl = get_line_ctx('', request.GET)
 
            ign_whitespace_lcl = get_ignore_ws('', request.GET)
 

	
 
            raw_diff = c.db_repo_scm_instance.get_diff(cs1, cs2,
 
                ignore_whitespace=ign_whitespace_lcl, context=context_lcl)
 
            diff_limit = None if c.fulldiff else self.cut_off_limit
 
            file_diff_data = []
 
            if method == 'show':
 
                diff_processor = diffs.DiffProcessor(raw_diff,
 
                                                     vcs=c.db_repo_scm_instance.alias,
 
                                                     diff_limit=diff_limit)
 
-                c.limited_diff = False
-                if isinstance(diff_processor.parsed, LimitedDiffContainer):
-                    c.limited_diff = True
+                c.limited_diff = diff_processor.limited_diff
 
                for f in diff_processor.parsed:
 
                    st = f['stats']
 
                    c.lines_added += st['added']
 
                    c.lines_deleted += st['deleted']
 
                    filename = f['filename']
 
                    fid = h.FID(changeset.raw_id, filename)
 
                    url_fid = h.FID('', filename)
 
                    diff = diff_processor.as_html(enable_comments=enable_comments,
 
                                                  parsed_lines=[f])
 
                    file_diff_data.append((fid, url_fid, f['operation'], f['old_filename'], filename, diff, st))
 
            else:
 
                # for downloads/raw we only need the raw diff, nothing else
 
                file_diff_data.append(('', None, None, None, raw_diff, None))
 
            c.changes[changeset.raw_id] = (cs1, cs2, file_diff_data)
 

	
 
        # sort comments in creation order
 
        c.comments = [com for com_id, com in sorted(comments.items())]
 

	
 
        # count inline comments
 
        for __, lines in c.inline_comments:
 
            for comments in lines.values():
 
                c.inline_cnt += len(comments)
 

	
 
        if len(c.cs_ranges) == 1:
 
            c.changeset = c.cs_ranges[0]
 
            c.parent_tmpl = ''.join(['# Parent  %s\n' % x.raw_id
 
                                     for x in c.changeset.parents])
 
        if method == 'download':
 
            response.content_type = 'text/plain'
 
            response.content_disposition = 'attachment; filename=%s.diff' \
 
                                            % revision[:12]
 
            return raw_diff
 
        elif method == 'patch':
 
            response.content_type = 'text/plain'
 
            c.diff = safe_unicode(raw_diff)
 
            return render('changeset/patch_changeset.html')
 
        elif method == 'raw':
 
            response.content_type = 'text/plain'
 
            return raw_diff
 
        elif method == 'show':
 
            self.__load_data()
 
            if len(c.cs_ranges) == 1:
 
                return render('changeset/changeset.html')
 
            else:
 
                c.cs_ranges_org = None
 
                c.cs_comments = {}
 
                revs = [ctx.revision for ctx in reversed(c.cs_ranges)]
 
                c.jsdata = graph_data(c.db_repo_scm_instance, revs)
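
As in the other controllers in this changeset, the limit is only applied when a full diff was not explicitly requested, and c.limited_diff tells the template that truncation happened. A rough, condensed sketch of that flow (the '?fulldiff=1' re-request is an assumption about how the templates use the flag, not part of this hunk):

    fulldiff = request.GET.get('fulldiff')            # any non-empty value disables the limit
    diff_limit = None if fulldiff else self.cut_off_limit
    diff_processor = diffs.DiffProcessor(raw_diff, diff_limit=diff_limit)
    c.limited_diff = diff_processor.limited_diff      # template can offer a ?fulldiff=1 link
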
kallithea/controllers/compare.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.compare
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
compare controller showing differences between two
 
repos, branches, bookmarks or tips
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: May 6, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 

	
 
import logging
 
import re
 

	
 
from tg import request, tmpl_context as c
 
from tg.i18n import ugettext as _
 
from webob.exc import HTTPFound, HTTPBadRequest, HTTPNotFound
 

	
 
from kallithea.config.routing import url
 
from kallithea.lib.utils2 import safe_str, safe_int
 
from kallithea.lib.vcs.utils.hgcompat import unionrepo
 
from kallithea.lib import helpers as h
 
from kallithea.lib.base import BaseRepoController, render
 
from kallithea.lib.auth import LoginRequired, HasRepoPermissionLevelDecorator
 
from kallithea.lib import diffs
 
from kallithea.model.db import Repository
 
from kallithea.lib.diffs import LimitedDiffContainer
 
from kallithea.controllers.changeset import _ignorews_url, _context_url
 
from kallithea.lib.graphmod import graph_data
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class CompareController(BaseRepoController):
 

	
 
    def _before(self, *args, **kwargs):
 
        super(CompareController, self)._before(*args, **kwargs)
 

	
 
        # The base repository has already been retrieved.
 
        c.a_repo = c.db_repo
 

	
 
        # Retrieve the "changeset" repository (default: same as base).
 
        other_repo = request.GET.get('other_repo', None)
 
        if other_repo is None:
 
            c.cs_repo = c.a_repo
 
        else:
 
            c.cs_repo = Repository.get_by_repo_name(other_repo)
 
            if c.cs_repo is None:
 
                msg = _('Could not find other repository %s') % other_repo
 
                h.flash(msg, category='error')
 
                raise HTTPFound(location=url('compare_home', repo_name=c.a_repo.repo_name))
 

	
 
        # Verify that it's even possible to compare these two repositories.
 
        if c.a_repo.scm_instance.alias != c.cs_repo.scm_instance.alias:
 
            msg = _('Cannot compare repositories of different types')
 
            h.flash(msg, category='error')
 
            raise HTTPFound(location=url('compare_home', repo_name=c.a_repo.repo_name))
 

	
 
    @staticmethod
 
    def _get_changesets(alias, org_repo, org_rev, other_repo, other_rev):
 
        """
 
        Returns lists of changesets that can be merged from org_repo@org_rev
 
        to other_repo@other_rev
 
        ... and the other way
 
        ... and the ancestors that would be used for merge
 

	
 
        :param org_repo: repo object, that is most likely the original repo we forked from
 
        :param org_rev: the revision we want our compare to be made
 
        :param other_repo: repo object, most likely the fork of org_repo. It has
 
            all changesets that we need to obtain
 
        :param other_rev: revision we want our compare to be made on other_repo
 
        """
 
        ancestors = None
 
        if org_rev == other_rev:
 
            org_changesets = []
 
@@ -227,67 +226,63 @@ class CompareController(BaseRepoControll
 

	
 
        revs = [ctx.revision for ctx in reversed(c.cs_ranges)]
 
        c.jsdata = graph_data(c.cs_repo.scm_instance, revs)
 

	
 
        if partial:
 
            return render('compare/compare_cs.html')
 

	
 
        org_repo = c.a_repo
 
        other_repo = c.cs_repo
 

	
 
        if merge:
 
            rev1 = msg = None
 
            if not c.cs_ranges:
 
                msg = _('Cannot show empty diff')
 
            elif not c.ancestors:
 
                msg = _('No ancestor found for merge diff')
 
            elif len(c.ancestors) == 1:
 
                rev1 = c.ancestors[0]
 
            else:
 
                msg = _('Multiple merge ancestors found for merge compare')
 
            if rev1 is None:
 
                h.flash(msg, category='error')
 
                log.error(msg)
 
                raise HTTPNotFound
 

	
 
            # case we want a simple diff without incoming changesets,
 
            # previewing what will be merged.
 
            # Make the diff on the other repo (which is known to have other_rev)
 
            log.debug('Using ancestor %s as rev1 instead of %s',
 
                      rev1, c.a_rev)
 
            org_repo = other_repo
 
        else: # comparing tips, not necessarily linearly related
 
            if org_repo != other_repo:
 
                # TODO: we could do this by using hg unionrepo
 
                log.error('cannot compare across repos %s and %s', org_repo, other_repo)
 
                h.flash(_('Cannot compare repositories without using common ancestor'), category='error')
 
                raise HTTPBadRequest
 
            rev1 = c.a_rev
 

	
 
        diff_limit = None if fulldiff else self.cut_off_limit
 

	
 
        log.debug('running diff between %s and %s in %s',
 
                  rev1, c.cs_rev, org_repo.scm_instance.path)
 
        raw_diff = org_repo.scm_instance.get_diff(rev1=rev1, rev2=c.cs_rev,
 
                                      ignore_whitespace=ignore_whitespace,
 
                                      context=line_context)
 

	
 
        diff_processor = diffs.DiffProcessor(raw_diff or '', diff_limit=diff_limit)
 

	
 
-        c.limited_diff = False
-        if isinstance(diff_processor.parsed, LimitedDiffContainer):
-            c.limited_diff = True
+        c.limited_diff = diff_processor.limited_diff
 
        c.file_diff_data = []
 
        c.lines_added = 0
 
        c.lines_deleted = 0
 
        for f in diff_processor.parsed:
 
            st = f['stats']
 
            c.lines_added += st['added']
 
            c.lines_deleted += st['deleted']
 
            filename = f['filename']
 
            fid = h.FID('', filename)
 
            diff = diff_processor.as_html(enable_comments=False,
 
                                          parsed_lines=[f])
 
            c.file_diff_data.append((fid, None, f['operation'], f['old_filename'], filename, diff, st))
 

	
 
        return render('compare/compare_diff.html')
kallithea/controllers/feed.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.feed
 
~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Feed controller for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 23, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 

	
 
import logging
 

	
 
from tg import response, tmpl_context as c
 
from tg.i18n import ugettext as _
 

	
 
from beaker.cache import cache_region, region_invalidate
 
from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed
 

	
 
from kallithea import CONFIG
 
from kallithea.lib import helpers as h
 
from kallithea.lib.auth import LoginRequired, HasRepoPermissionLevelDecorator
 
from kallithea.lib.base import BaseRepoController
 
-from kallithea.lib.diffs import DiffProcessor, LimitedDiffContainer
+from kallithea.lib.diffs import DiffProcessor
 
from kallithea.model.db import CacheInvalidation
 
from kallithea.lib.utils2 import safe_int, str2bool, safe_unicode
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
language = 'en-us'
 
ttl = "5"
 

	
 

	
 
class FeedController(BaseRepoController):
 

	
 
    @LoginRequired(api_access=True)
 
    @HasRepoPermissionLevelDecorator('read')
 
    def _before(self, *args, **kwargs):
 
        super(FeedController, self)._before(*args, **kwargs)
 

	
 
    def _get_title(self, cs):
 
        return h.shorter(cs.message, 160)
 

	
 
    def __get_desc(self, cs):
 
        desc_msg = [(_('%s committed on %s')
 
                     % (h.person(cs.author), h.fmt_date(cs.date))) + '<br/>']
 
        # branches, tags, bookmarks
 
        if cs.branch:
 
            desc_msg.append('branch: %s<br/>' % cs.branch)
 
        for book in cs.bookmarks:
 
            desc_msg.append('bookmark: %s<br/>' % book)
 
        for tag in cs.tags:
 
            desc_msg.append('tag: %s<br/>' % tag)
 

	
 
        changes = []
 
        diff_limit = safe_int(CONFIG.get('rss_cut_off_limit', 32 * 1024))
 
        raw_diff = cs.diff()
 
        diff_processor = DiffProcessor(raw_diff,
 
                                       diff_limit=diff_limit,
 
                                       inline_diff=False)
 
-        limited_diff = False
-        if isinstance(diff_processor.parsed, LimitedDiffContainer):
-            limited_diff = True
 

	
 
        for st in diff_processor.parsed:
 
            st.update({'added': st['stats']['added'],
 
                       'removed': st['stats']['deleted']})
 
            changes.append('\n %(operation)s %(filename)s '
 
                           '(%(added)s lines added, %(removed)s lines removed)'
 
                            % st)
 
-        if limited_diff:
+        if diff_processor.limited_diff:
 
            changes = changes + ['\n ' +
 
                                 _('Changeset was too big and was cut off...')]
 

	
 
        # rev link
 
        _url = h.canonical_url('changeset_home', repo_name=c.db_repo.repo_name,
 
                   revision=cs.raw_id)
 
        desc_msg.append('changeset: <a href="%s">%s</a>' % (_url, cs.raw_id[:8]))
 

	
 
        desc_msg.append('<pre>')
 
        desc_msg.append(h.urlify_text(cs.message))
 
        desc_msg.append('\n')
 
        desc_msg.extend(changes)
 
        if str2bool(CONFIG.get('rss_include_diff', False)):
 
            desc_msg.append('\n\n')
 
            desc_msg.append(raw_diff)
 
        desc_msg.append('</pre>')
 
        return map(safe_unicode, desc_msg)
 

	
 
    def atom(self, repo_name):
 
        """Produce an atom-1.0 feed via feedgenerator module"""
 

	
 
        @cache_region('long_term', '_get_feed_from_cache')
 
        def _get_feed_from_cache(key, kind):
 
            feed = Atom1Feed(
 
                title=_('%s %s feed') % (c.site_name, repo_name),
 
                link=h.canonical_url('summary_home', repo_name=repo_name),
 
                description=_('Changes on %s repository') % repo_name,
 
                language=language,
 
                ttl=ttl
 
            )
 

	
 
            rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
 
            for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])):
 
                feed.add_item(title=self._get_title(cs),
 
                              link=h.canonical_url('changeset_home', repo_name=repo_name,
 
                                       revision=cs.raw_id),
 
                              author_name=cs.author,
 
                              description=''.join(self.__get_desc(cs)),
 
                              pubdate=cs.date,
 
                              )
 

	
 
            response.content_type = feed.mime_type
 
            return feed.writeString('utf-8')
 

	
 
        kind = 'ATOM'
 
        valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
 
        if not valid:
 
            region_invalidate(_get_feed_from_cache, None, '_get_feed_from_cache', repo_name, kind)
kallithea/controllers/pullrequests.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.pullrequests
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
pull requests controller for Kallithea for initializing pull requests
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: May 7, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 
import traceback
 
import formencode
 

	
 
from tg import request, tmpl_context as c
 
from tg.i18n import ugettext as _
 
from webob.exc import HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest
 

	
 
from kallithea.config.routing import url
 
from kallithea.lib import helpers as h
 
from kallithea.lib import diffs
 
from kallithea.lib.auth import LoginRequired, HasRepoPermissionLevelDecorator, \
 
    NotAnonymous
 
from kallithea.lib.base import BaseRepoController, render, jsonify
 
from kallithea.lib.diffs import LimitedDiffContainer
 
from kallithea.lib.page import Page
 
from kallithea.lib.utils import action_logger
 
from kallithea.lib.vcs.exceptions import EmptyRepositoryError, ChangesetDoesNotExistError
 
from kallithea.lib.vcs.utils import safe_str
 
from kallithea.lib.vcs.utils.hgcompat import unionrepo
 
from kallithea.model.db import PullRequest, ChangesetStatus, ChangesetComment, \
 
    PullRequestReviewer, Repository, User
 
from kallithea.model.pull_request import CreatePullRequestAction, CreatePullRequestIterationAction, PullRequestModel
 
from kallithea.model.meta import Session
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.comment import ChangesetCommentsModel
 
from kallithea.model.changeset_status import ChangesetStatusModel
 
from kallithea.model.forms import PullRequestForm, PullRequestPostForm
 
from kallithea.lib.utils2 import safe_int
 
from kallithea.controllers.changeset import _ignorews_url, _context_url, \
 
    create_comment
 
from kallithea.controllers.compare import CompareController
 
from kallithea.lib.graphmod import graph_data
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def _get_reviewer(user_id):
 
    """Look up user by ID and validate it as a potential reviewer."""
 
    try:
 
        user = User.get(int(user_id))
 
    except ValueError:
 
        user = None
 

	
 
    if user is None or user.is_default_user:
 
        h.flash(_('Invalid reviewer "%s" specified') % user_id, category='error')
 
        raise HTTPBadRequest()
 

	
 
    return user
 

	
 

	
 
class PullrequestsController(BaseRepoController):
 

	
 
    def _get_repo_refs(self, repo, rev=None, branch=None, branch_rev=None):
 
        """return a structure with repo's interesting changesets, suitable for
 
        the selectors in pullrequest.html
 

	
 
        rev: a revision that must be in the list somehow and selected by default
 
        branch: a branch that must be in the list and selected by default - even if closed
 
        branch_rev: a revision of which peers should be preferred and available."""
 
        # list named branches that have been merged to this named branch - it should probably merge back
 
        peers = []
 

	
 
@@ -553,101 +552,97 @@ class PullrequestsController(BaseRepoCon
 
                        c.update_msg = _('The following additional changes are available on %s:') % c.cs_branch_name
 
                    else:
 
                        show = set()
 
                        avail_revs = set() # drop revs[0]
 
                        c.update_msg = _('No additional changesets found for iterating on this pull request.')
 

	
 
                    # TODO: handle branch heads that are not tip-most
 
                    brevs = org_scm_instance._repo.revs('%s - %ld - %s', c.cs_branch_name, avail_revs, revs[0])
 
                    if brevs:
 
                        # also show changesets that are on branch but neither ancestors nor descendants
 
                        show.update(org_scm_instance._repo.revs('::%ld - ::%ld - ::%s', brevs, avail_revs, c.a_branch_name))
 
                        show.add(revs[0]) # make sure graph shows this so we can see how they relate
 
                        c.update_msg_other = _('Note: Branch %s has another head: %s.') % (c.cs_branch_name,
 
                            h.short_id(org_scm_instance.get_changeset((max(brevs))).raw_id))
 

	
 
                    avail_show = sorted(show, reverse=True)
 

	
 
            elif org_scm_instance.alias == 'git':
 
                c.cs_repo.scm_instance.get_changeset(c.cs_rev) # check it exists - raise ChangesetDoesNotExistError if not
 
                c.update_msg = _("Git pull requests don't support iterating yet.")
 
        except ChangesetDoesNotExistError:
 
            c.update_msg = _('Error: some changesets not found when displaying pull request from %s.') % c.cs_rev
 

	
 
        c.avail_revs = avail_revs
 
        c.avail_cs = [org_scm_instance.get_changeset(r) for r in avail_show]
 
        c.avail_jsdata = graph_data(org_scm_instance, avail_show)
 

	
 
        raw_ids = [x.raw_id for x in c.cs_ranges]
 
        c.cs_comments = c.cs_repo.get_comments(raw_ids)
 
        c.cs_statuses = c.cs_repo.statuses(raw_ids)
 

	
 
        ignore_whitespace = request.GET.get('ignorews') == '1'
 
        line_context = safe_int(request.GET.get('context'), 3)
 
        c.ignorews_url = _ignorews_url
 
        c.context_url = _context_url
 
        fulldiff = request.GET.get('fulldiff')
 
        diff_limit = None if fulldiff else self.cut_off_limit
 

	
 
        # we swap org/other ref since we run a simple diff on one repo
 
        log.debug('running diff between %s and %s in %s',
 
                  c.a_rev, c.cs_rev, org_scm_instance.path)
 
        try:
 
            raw_diff = org_scm_instance.get_diff(rev1=safe_str(c.a_rev), rev2=safe_str(c.cs_rev),
 
                                                ignore_whitespace=ignore_whitespace,
 
                                                context=line_context)
 
        except ChangesetDoesNotExistError:
 
            raw_diff = _("The diff can't be shown - the PR revisions could not be found.")
 
        diff_processor = diffs.DiffProcessor(raw_diff or '', diff_limit=diff_limit)
 

	
 
-        c.limited_diff = False
-        if isinstance(diff_processor.parsed, LimitedDiffContainer):
-            c.limited_diff = True
+        c.limited_diff = diff_processor.limited_diff
 
        c.file_diff_data = []
 
        c.lines_added = 0
 
        c.lines_deleted = 0
 

	
 
        for f in diff_processor.parsed:
 
            st = f['stats']
 
            c.lines_added += st['added']
 
            c.lines_deleted += st['deleted']
 
            filename = f['filename']
 
            fid = h.FID('', filename)
 
            diff = diff_processor.as_html(enable_comments=True,
 
                                          parsed_lines=[f])
 
            c.file_diff_data.append((fid, None, f['operation'], f['old_filename'], filename, diff, st))
 

	
 
        # inline comments
 
        c.inline_cnt = 0
 
        c.inline_comments = cc_model.get_inline_comments(
 
                                c.db_repo.repo_id,
 
                                pull_request=pull_request_id)
 
        # count inline comments
 
        for __, lines in c.inline_comments:
 
            for comments in lines.values():
 
                c.inline_cnt += len(comments)
 
        # comments
 
        c.comments = cc_model.get_comments(c.db_repo.repo_id, pull_request=pull_request_id)
 

	
 
        # (badly named) pull-request status calculation based on reviewer votes
 
        (c.pull_request_reviewers,
 
         c.pull_request_pending_reviewers,
 
         c.current_voting_result,
 
         ) = cs_model.calculate_pull_request_result(c.pull_request)
 
        c.changeset_statuses = ChangesetStatus.STATUSES
 

	
 
        c.is_ajax_preview = False
 
        c.ancestors = None # [c.a_rev] ... but that is shown in an other way
 
        return render('/pullrequests/pullrequest_show.html')
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionLevelDecorator('read')
 
    @jsonify
 
    def comment(self, repo_name, pull_request_id):
 
        pull_request = PullRequest.get_or_404(pull_request_id)
 

	
 
        status = request.POST.get('changeset_status')
 
        close_pr = request.POST.get('save_close')
 
        delete = request.POST.get('save_delete')
 
        f_path = request.POST.get('f_path')
kallithea/lib/diffs.py
 
@@ -96,408 +96,394 @@ def wrapped_diff(filenode_old, filenode_
 
            diff = wrap_to_table(escape('Submodule %r' % submodules[0]))
 
        else:
 
            diff = wrap_to_table(_('No changes detected'))
 

	
 
    cs1 = filenode_old.changeset.raw_id
 
    cs2 = filenode_new.changeset.raw_id
 

	
 
    return cs1, cs2, a_path, diff, stats, op
 

	
 

	
 
def get_gitdiff(filenode_old, filenode_new, ignore_whitespace=True, context=3):
 
    """
 
    Returns git style diff between given ``filenode_old`` and ``filenode_new``.
 
    """
 
    # make sure we pass in default context
 
    context = context or 3
 
    submodules = filter(lambda o: isinstance(o, SubModuleNode),
 
                        [filenode_new, filenode_old])
 
    if submodules:
 
        return ''
 

	
 
    for filenode in (filenode_old, filenode_new):
 
        if not isinstance(filenode, FileNode):
 
            raise VCSError("Given object should be FileNode object, not %s"
 
                % filenode.__class__)
 

	
 
    repo = filenode_new.changeset.repository
 
    old_raw_id = getattr(filenode_old.changeset, 'raw_id', repo.EMPTY_CHANGESET)
 
    new_raw_id = getattr(filenode_new.changeset, 'raw_id', repo.EMPTY_CHANGESET)
 

	
 
    vcs_gitdiff = repo.get_diff(old_raw_id, new_raw_id, filenode_new.path,
 
                                ignore_whitespace, context)
 
    return vcs_gitdiff
 

	
 

	
 
NEW_FILENODE = 1
 
DEL_FILENODE = 2
 
MOD_FILENODE = 3
 
RENAMED_FILENODE = 4
 
COPIED_FILENODE = 5
 
CHMOD_FILENODE = 6
 
BIN_FILENODE = 7
 

	
 

	
 
class DiffLimitExceeded(Exception):
 
    pass
 

	
 

	
 
class LimitedDiffContainer(object):
 

	
 
    def __init__(self, diff_limit, cur_diff_size, diff):
 
        self.diff = diff
 
        self.diff_limit = diff_limit
 
        self.cur_diff_size = cur_diff_size
 

	
 
    def __iter__(self):
 
        for l in self.diff:
 
            yield l
 

	
 

	
 
class DiffProcessor(object):
 
    """
 
    Give it a unified or git diff and it returns a list of the files that were
 
    mentioned in the diff together with a dict of meta information that
 
    can be used to render it in a HTML template.
 
    """
 
    _diff_git_re = re.compile('^diff --git', re.MULTILINE)
 
    _chunk_re = re.compile(r'^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@(.*)')
 
    _newline_marker = re.compile(r'^\\ No newline at end of file')
 
    _git_header_re = re.compile(r"""
 
        ^diff[ ]--git
 
            [ ]a/(?P<a_path>.+?)[ ]b/(?P<b_path>.+?)\n
 
        (?:^old[ ]mode[ ](?P<old_mode>\d+)\n
 
           ^new[ ]mode[ ](?P<new_mode>\d+)(?:\n|$))?
 
        (?:^similarity[ ]index[ ](?P<similarity_index>\d+)%\n
 
           ^rename[ ]from[ ](?P<rename_from>.+)\n
 
           ^rename[ ]to[ ](?P<rename_to>.+)(?:\n|$))?
 
        (?:^new[ ]file[ ]mode[ ](?P<new_file_mode>.+)(?:\n|$))?
 
        (?:^deleted[ ]file[ ]mode[ ](?P<deleted_file_mode>.+)(?:\n|$))?
 
        (?:^index[ ](?P<a_blob_id>[0-9A-Fa-f]+)
 
            \.\.(?P<b_blob_id>[0-9A-Fa-f]+)[ ]?(?P<b_mode>.+)?(?:\n|$))?
 
        (?:^(?P<bin_patch>GIT[ ]binary[ ]patch)(?:\n|$))?
 
        (?:^---[ ](a/(?P<a_file>.+?)|/dev/null)\t?(?:\n|$))?
 
        (?:^\+\+\+[ ](b/(?P<b_file>.+?)|/dev/null)\t?(?:\n|$))?
 
    """, re.VERBOSE | re.MULTILINE)
 
    _hg_header_re = re.compile(r"""
 
        ^diff[ ]--git
 
            [ ]a/(?P<a_path>.+?)[ ]b/(?P<b_path>.+?)\n
 
        (?:^old[ ]mode[ ](?P<old_mode>\d+)\n
 
           ^new[ ]mode[ ](?P<new_mode>\d+)(?:\n|$))?
 
        (?:^similarity[ ]index[ ](?P<similarity_index>\d+)%(?:\n|$))?
 
        (?:^rename[ ]from[ ](?P<rename_from>.+)\n
 
           ^rename[ ]to[ ](?P<rename_to>.+)(?:\n|$))?
 
        (?:^copy[ ]from[ ](?P<copy_from>.+)\n
 
           ^copy[ ]to[ ](?P<copy_to>.+)(?:\n|$))?
 
        (?:^new[ ]file[ ]mode[ ](?P<new_file_mode>.+)(?:\n|$))?
 
        (?:^deleted[ ]file[ ]mode[ ](?P<deleted_file_mode>.+)(?:\n|$))?
 
        (?:^index[ ](?P<a_blob_id>[0-9A-Fa-f]+)
 
            \.\.(?P<b_blob_id>[0-9A-Fa-f]+)[ ]?(?P<b_mode>.+)?(?:\n|$))?
 
        (?:^(?P<bin_patch>GIT[ ]binary[ ]patch)(?:\n|$))?
 
        (?:^---[ ](a/(?P<a_file>.+?)|/dev/null)\t?(?:\n|$))?
 
        (?:^\+\+\+[ ](b/(?P<b_file>.+?)|/dev/null)\t?(?:\n|$))?
 
    """, re.VERBOSE | re.MULTILINE)
 

	
 
    # Used for inline highlighter word split, must match the substitutions in _escaper
 
    _token_re = re.compile(r'()(&amp;|&lt;|&gt;|<u>\t</u>|<u class="cr"></u>| <i></i>|\W+?)')
 

	
 
    _escape_re = re.compile(r'(&)|(<)|(>)|(\t)|(\r)|(?<=.)( \n| $)')
 

	
 
    def __init__(self, diff, vcs='hg', diff_limit=None, inline_diff=True):
 
        """
 
        :param diff:   a text in diff format
 
        :param vcs: type of version control, 'hg' or 'git'
 
        :param diff_limit: the diff size that is considered "big"; beyond it,
            cut-off will be triggered. Set to None to show the full diff
 
        """
 
        if not isinstance(diff, basestring):
 
            raise Exception('Diff must be a basestring got %s instead' % type(diff))
 

	
 
        self._diff = diff
 
        self.adds = 0
 
        self.removes = 0
 
        # calculate diff size
 
        self.diff_limit = diff_limit
 
        self.cur_diff_size = 0
 
        self.limited_diff = False
 
        self.vcs = vcs
 
        self.parsed = self._parse_gitdiff(inline_diff=inline_diff)
 

	
 
    def _escaper(self, string):
 
        """
 
        Do HTML escaping/markup and check the diff limit
 
        """
 
        self.cur_diff_size += len(string)
 

	
 
        # escaper gets iterated on each .next() call and it checks if each
 
        # parsed line doesn't exceed the diff limit
 
        if self.diff_limit is not None and self.cur_diff_size > self.diff_limit:
 
            raise DiffLimitExceeded('Diff Limit Exceeded')
 

	
 
        def substitute(m):
 
            groups = m.groups()
 
            if groups[0]:
 
                return '&amp;'
 
            if groups[1]:
 
                return '&lt;'
 
            if groups[2]:
 
                return '&gt;'
 
            if groups[3]:
 
                return '<u>\t</u>'
 
            if groups[4]:
 
                return '<u class="cr"></u>'
 
            if groups[5]:
 
                return ' <i></i>'
 
            assert False
 

	
 
        return self._escape_re.sub(substitute, safe_unicode(string))
 

	
 
    def _highlight_inline_diff(self, old, new):
 
        """
 
        Highlight simple add/remove in two lines given as info dicts. They are
 
        modified in place and given markup with <del>/<ins>.
 
        """
 
        assert old['action'] == 'del'
 
        assert new['action'] == 'add'
 

	
 
        oldwords = self._token_re.split(old['line'])
 
        newwords = self._token_re.split(new['line'])
 
        sequence = difflib.SequenceMatcher(None, oldwords, newwords)
 

	
 
        oldfragments, newfragments = [], []
 
        for tag, i1, i2, j1, j2 in sequence.get_opcodes():
 
            oldfrag = ''.join(oldwords[i1:i2])
 
            newfrag = ''.join(newwords[j1:j2])
 
            if tag != 'equal':
 
                if oldfrag:
 
                    oldfrag = '<del>%s</del>' % oldfrag
 
                if newfrag:
 
                    newfrag = '<ins>%s</ins>' % newfrag
 
            oldfragments.append(oldfrag)
 
            newfragments.append(newfrag)
 

	
 
        old['line'] = "".join(oldfragments)
 
        new['line'] = "".join(newfragments)
 

	
 
    def _get_header(self, diff_chunk):
 
        """
 
        Parses a Git diff for a single file (header and chunks) and returns a tuple with:
 

	
 
        1. A dict with meta info:
 

	
 
            a_path, b_path, similarity_index, rename_from, rename_to,
 
            old_mode, new_mode, new_file_mode, deleted_file_mode,
 
            a_blob_id, b_blob_id, b_mode, a_file, b_file
 

	
 
        2. An iterator yielding lines with simple HTML markup.
 
        """
 
        match = None
 
        if self.vcs == 'git':
 
            match = self._git_header_re.match(diff_chunk)
 
        elif self.vcs == 'hg':
 
            match = self._hg_header_re.match(diff_chunk)
 
        if match is None:
 
            raise Exception('diff not recognized as valid %s diff' % self.vcs)
 
        meta_info = match.groupdict()
 
        rest = diff_chunk[match.end():]
 
        if rest and not rest.startswith('@') and not rest.startswith('literal ') and not rest.startswith('delta '):
 
            raise Exception('cannot parse %s diff header: %r followed by %r' % (self.vcs, diff_chunk[:match.end()], rest[:1000]))
 
        diff_lines = (self._escaper(m.group(0)) for m in re.finditer(r'.*\n|.+$', rest)) # don't split on \r as str.splitlines do
 
        return meta_info, diff_lines
 

	
 
    def _parse_gitdiff(self, inline_diff):
 
        """Parse self._diff and return a list of dicts with meta info and chunks for each file.
 
        If diff is truncated, wrap it in LimitedDiffContainer.
 
        Might set limited_diff.
 
        Optionally, do an extra pass to add extra markup for one-liner changes.
 
        """
 
        _files = [] # list of dicts with meta info and chunks
 
        diff_container = lambda arg: arg
 

	
 
        starts = [m.start() for m in self._diff_git_re.finditer(self._diff)]
 
        starts.append(len(self._diff))
 

	
 
        for start, end in zip(starts, starts[1:]):
 
            head, diff_lines = self._get_header(buffer(self._diff, start, end - start))
 

	
 
            op = None
 
            stats = {
 
                'added': 0,
 
                'deleted': 0,
 
                'binary': False,
 
                'ops': {},
 
            }
 

	
 
            if head['deleted_file_mode']:
 
                op = 'D'
 
                stats['binary'] = True
 
                stats['ops'][DEL_FILENODE] = 'deleted file'
 

	
 
            elif head['new_file_mode']:
 
                op = 'A'
 
                stats['binary'] = True
 
                stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
 
            else:  # modify operation, can be cp, rename, chmod
 
                # CHMOD
 
                if head['new_mode'] and head['old_mode']:
 
                    op = 'M'
 
                    stats['binary'] = True
 
                    stats['ops'][CHMOD_FILENODE] = ('modified file chmod %s => %s'
 
                                        % (head['old_mode'], head['new_mode']))
 
                # RENAME
 
                if (head['rename_from'] and head['rename_to']
 
                      and head['rename_from'] != head['rename_to']):
 
                    op = 'R'
 
                    stats['binary'] = True
 
                    stats['ops'][RENAMED_FILENODE] = ('file renamed from %s to %s'
 
                                    % (head['rename_from'], head['rename_to']))
 
                # COPY
 
                if head.get('copy_from') and head.get('copy_to'):
 
                    op = 'M'
 
                    stats['binary'] = True
 
                    stats['ops'][COPIED_FILENODE] = ('file copied from %s to %s'
 
                                        % (head['copy_from'], head['copy_to']))
 
                # FALL BACK: detect missed old style add or remove
 
                if op is None:
 
                    if not head['a_file'] and head['b_file']:
 
                        op = 'A'
 
                        stats['binary'] = True
 
                        stats['ops'][NEW_FILENODE] = 'new file'
 

	
 
                    elif head['a_file'] and not head['b_file']:
 
                        op = 'D'
 
                        stats['binary'] = True
 
                        stats['ops'][DEL_FILENODE] = 'deleted file'
 

	
 
                # it's neither ADD nor DELETE
 
                if op is None:
 
                    op = 'M'
 
                    stats['binary'] = True
 
                    stats['ops'][MOD_FILENODE] = 'modified file'
 

	
 
            # a real non-binary diff
 
            if head['a_file'] or head['b_file']:
 
                try:
 
                    chunks, added, deleted = self._parse_lines(diff_lines)
 
                    stats['binary'] = False
 
                    stats['added'] = added
 
                    stats['deleted'] = deleted
 
                    # explicit mark that it's a modified file
 
                    if op == 'M':
 
                        stats['ops'][MOD_FILENODE] = 'modified file'
 

	
 
                except DiffLimitExceeded:
 
                    diff_container = lambda _diff: \
 
                        LimitedDiffContainer(self.diff_limit,
 
                                            self.cur_diff_size, _diff)
 
                    self.limited_diff = True
 
                    break
 
            else:  # Git binary patch (or empty diff)
 
                # Git binary patch
 
                if head['bin_patch']:
 
                    stats['ops'][BIN_FILENODE] = 'binary diff not shown'
 
                chunks = []
 

	
 
            if op == 'D' and chunks:
 
                # a way of seeing deleted content could perhaps be nice - but
 
                # not with the current UI
 
                chunks = []
 

	
 
            chunks.insert(0, [{
 
                'old_lineno': '',
 
                'new_lineno': '',
 
                'action':     'context',
 
                'line':       msg,
 
                } for _op, msg in stats['ops'].iteritems()
 
                  if _op not in [MOD_FILENODE]])
 

	
 
            _files.append({
 
                'old_filename':     head['a_path'],
 
                'filename':         head['b_path'],
 
                'old_revision':     head['a_blob_id'],
 
                'new_revision':     head['b_blob_id'],
 
                'chunks':           chunks,
 
                'operation':        op,
 
                'stats':            stats,
 
            })
 

	
 
        if not inline_diff:
 
-            return diff_container(_files)
+            return _files
 

	
 
        # highlight inline changes when one del is followed by one add
 
        for diff_data in _files:
 
            for chunk in diff_data['chunks']:
 
                lineiter = iter(chunk)
 
                try:
 
                    peekline = lineiter.next()
 
                    while True:
 
                        # find a first del line
 
                        while peekline['action'] != 'del':
 
                            peekline = lineiter.next()
 
                        delline = peekline
 
                        peekline = lineiter.next()
 
                        # if not followed by add, eat all following del lines
 
                        if peekline['action'] != 'add':
 
                            while peekline['action'] == 'del':
 
                                peekline = lineiter.next()
 
                            continue
 
                        # found an add - make sure it is the only one
 
                        addline = peekline
 
                        try:
 
                            peekline = lineiter.next()
 
                        except StopIteration:
 
                            # add was last line - ok
 
                            self._highlight_inline_diff(delline, addline)
 
                            raise
 
                        if peekline['action'] != 'add':
 
                            # there was only one add line - ok
 
                            self._highlight_inline_diff(delline, addline)
 
                except StopIteration:
 
                    pass
 

	
 
-        return diff_container(_files)
+        return _files
 

	
 
    def _parse_lines(self, diff_lines):
 
        """
 
        Given an iterator of diff body lines, parse them and return a dict per
 
        line and added/removed totals.
 
        """
 
        added = deleted = 0
 
        old_line = old_end = new_line = new_end = None
 

	
 
        try:
 
            chunks = []
 
            line = diff_lines.next()
 

	
 
            while True:
 
                lines = []
 
                chunks.append(lines)
 

	
 
                match = self._chunk_re.match(line)
 

	
 
                if not match:
 
                    raise Exception('error parsing diff @@ line %r' % line)
 

	
 
                gr = match.groups()
 
                (old_line, old_end,
 
                 new_line, new_end) = [int(x or 1) for x in gr[:-1]]
 
                old_line -= 1
 
                new_line -= 1
 

	
 
                context = len(gr) == 5
 
                old_end += old_line
 
                new_end += new_line
 

	
 
                if context:
 
                    # skip context only if it's first line
 
                    if int(gr[0]) > 1:
 
                        lines.append({
 
                            'old_lineno': '...',
 
                            'new_lineno': '...',
 
                            'action':     'context',
 
                            'line':       line,
 
                        })
 

	
 
                line = diff_lines.next()
 

	
 
                while old_line < old_end or new_line < new_end:
 
                    if not line:
 
                        raise Exception('error parsing diff - empty line at -%s+%s' % (old_line, new_line))
 

	
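
For completeness: the flag is driven inside DiffProcessor itself. _escaper accumulates cur_diff_size and raises DiffLimitExceeded once diff_limit is exceeded; _parse_gitdiff catches the exception, sets limited_diff = True and stops parsing further files. A minimal standalone sketch of the resulting API (assumes a Kallithea/Python 2 environment where kallithea.lib.diffs is importable; the toy diff and limit values are illustrative only):

    from kallithea.lib import diffs

    raw_diff = (
        "diff --git a/foo.txt b/foo.txt\n"
        "--- a/foo.txt\n"
        "+++ b/foo.txt\n"
        "@@ -1,1 +1,2 @@\n"
        " hello\n"
        "+world\n"
    )

    # A tiny diff_limit forces the cut-off, so the flag flips to True.
    dp = diffs.DiffProcessor(raw_diff, vcs='git', diff_limit=5)
    assert dp.limited_diff

    # Without a limit the whole diff is parsed and the flag stays False.
    dp = diffs.DiffProcessor(raw_diff, vcs='git', diff_limit=None)
    assert not dp.limited_diff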