Changeset - f9bc28c44f30
[Not reviewed]
default
Mads Kiilerich - 2014-08-12 13:08:23
madski@unity3d.com
urls: introduce canonical_url config setting

All URLs that are shown, persisted, or emailed will use this setting instead
of the URL of the current request.

This is convenient when the server is reachable under multiple names - for
instance when transitioning from one protocol, domain, or hostname to another.
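
As a sketch of how the new setting and the h.canonical_url helper introduced
by this changeset fit together (the helper itself lives in
kallithea/lib/helpers.py and is not shown in this excerpt; the ini section
name and the exact parsing below are assumptions, not the actual
implementation):

    # assumed configuration, e.g. in the [app:main] section of the .ini file:
    #   canonical_url = https://kallithea.example.com

    import kallithea
    from pylons import url

    def canonical_url(*args, **kargs):
        """Like url(..., qualified=True), but taking protocol and host from
        the canonical_url setting instead of from the current request."""
        try:
            parts = kallithea.CONFIG.get('canonical_url', '').split('://', 1)
            kargs['host'] = parts[1]
            kargs['protocol'] = parts[0]
        except IndexError:
            # setting not configured - fall back to the current request
            pass
        kargs['qualified'] = True
        return url(*args, **kargs)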
19 files changed with 99 insertions and 101 deletions:
0 comments (0 inline, 0 general)
kallithea/controllers/feed.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.feed
 
~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Feed controller for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 23, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 

	
 
import logging
 

	
 
from pylons import url, response, tmpl_context as c
 
from pylons import response, tmpl_context as c
 
from pylons.i18n.translation import _
 

	
 
from beaker.cache import cache_region, region_invalidate
 
from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed
 

	
 
from kallithea.lib import helpers as h
 
from kallithea.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
 
from kallithea.lib.base import BaseRepoController
 
from kallithea.lib.diffs import DiffProcessor, LimitedDiffContainer
 
from kallithea.model.db import CacheInvalidation
 
from kallithea.lib.utils2 import safe_int, str2bool, safe_unicode
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class FeedController(BaseRepoController):
 

	
 
    @LoginRequired(api_access=True)
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def __before__(self):
 
        super(FeedController, self).__before__()
 
        #common values for feeds
 
        self.description = _('Changes on %s repository')
 
        self.title = self.title = _('%s %s feed') % (c.site_name, '%s')
 
        self.language = 'en-us'
 
        self.ttl = "5"
 
        import kallithea
 
        CONF = kallithea.CONFIG
 
        self.include_diff = str2bool(CONF.get('rss_include_diff', False))
 
        self.feed_nr = safe_int(CONF.get('rss_items_per_page', 20))
 
        # we need to protect against parsing huge diffs here, otherwise
 
        # we can kill the server
 
        self.feed_diff_limit = safe_int(CONF.get('rss_cut_off_limit', 32 * 1024))
 

	
 
    def _get_title(self, cs):
 
        return "%s" % (
 
            h.shorter(cs.message, 160)
 
        )
 

	
 
    def __changes(self, cs):
 
        changes = []
 
        diff_processor = DiffProcessor(cs.diff(),
 
                                       diff_limit=self.feed_diff_limit)
 
        _parsed = diff_processor.prepare(inline_diff=False)
 
        limited_diff = False
 
        if isinstance(_parsed, LimitedDiffContainer):
 
            limited_diff = True
 

	
 
        for st in _parsed:
 
            st.update({'added': st['stats']['added'],
 
                       'removed': st['stats']['deleted']})
 
            changes.append('\n %(operation)s %(filename)s '
 
                           '(%(added)s lines added, %(removed)s lines removed)'
 
                            % st)
 
        if limited_diff:
 
            changes = changes + ['\n ' +
 
                                 _('Changeset was too big and was cut off...')]
 
        return diff_processor, changes
 

	
 
    def __get_desc(self, cs):
 
        desc_msg = [(_('%s committed on %s')
 
                     % (h.person(cs.author), h.fmt_date(cs.date))) + '<br/>']
 
        #branches, tags, bookmarks
 
        if cs.branch:
 
            desc_msg.append('branch: %s<br/>' % cs.branch)
 
        if h.is_hg(c.db_repo_scm_instance):
 
            for book in cs.bookmarks:
 
                desc_msg.append('bookmark: %s<br/>' % book)
 
        for tag in cs.tags:
 
            desc_msg.append('tag: %s<br/>' % tag)
 
        diff_processor, changes = self.__changes(cs)
 
        # rev link
 
        _url = url('changeset_home', repo_name=c.db_repo.repo_name,
 
                   revision=cs.raw_id, qualified=True)
 
        _url = h.canonical_url('changeset_home', repo_name=c.db_repo.repo_name,
 
                   revision=cs.raw_id)
 
        desc_msg.append('changeset: <a href="%s">%s</a>' % (_url, cs.raw_id[:8]))
 

	
 
        desc_msg.append('<pre>')
 
        desc_msg.append(cs.message)
 
        desc_msg.append('\n')
 
        desc_msg.extend(changes)
 
        if self.include_diff:
 
            desc_msg.append('\n\n')
 
            desc_msg.append(diff_processor.as_raw())
 
        desc_msg.append('</pre>')
 
        return map(safe_unicode, desc_msg)
 

	
 
    def atom(self, repo_name):
 
        """Produce an atom-1.0 feed via feedgenerator module"""
 

	
 
        @cache_region('long_term')
 
        def _get_feed_from_cache(key, kind):
 
            feed = Atom1Feed(
 
                 title=self.title % repo_name,
 
                 link=url('summary_home', repo_name=repo_name,
 
                          qualified=True),
 
                 link=h.canonical_url('summary_home', repo_name=repo_name),
 
                 description=self.description % repo_name,
 
                 language=self.language,
 
                 ttl=self.ttl
 
            )
 

	
 
            for cs in reversed(list(c.db_repo_scm_instance[-self.feed_nr:])):
 
                feed.add_item(title=self._get_title(cs),
 
                              link=url('changeset_home', repo_name=repo_name,
 
                                       revision=cs.raw_id, qualified=True),
 
                              link=h.canonical_url('changeset_home', repo_name=repo_name,
 
                                       revision=cs.raw_id),
 
                              author_name=cs.author,
 
                              description=''.join(self.__get_desc(cs)),
 
                              pubdate=cs.date,
 
                              )
 

	
 
            response.content_type = feed.mime_type
 
            return feed.writeString('utf-8')
 

	
 
        kind = 'ATOM'
 
        valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
 
        if not valid:
 
            region_invalidate(_get_feed_from_cache, None, repo_name, kind)
 
        return _get_feed_from_cache(repo_name, kind)
 

	
 
    def rss(self, repo_name):
 
        """Produce an rss2 feed via feedgenerator module"""
 

	
 
        @cache_region('long_term')
 
        def _get_feed_from_cache(key, kind):
 
            feed = Rss201rev2Feed(
 
                title=self.title % repo_name,
 
                link=url('summary_home', repo_name=repo_name,
 
                         qualified=True),
 
                link=h.canonical_url('summary_home', repo_name=repo_name),
 
                description=self.description % repo_name,
 
                language=self.language,
 
                ttl=self.ttl
 
            )
 

	
 
            for cs in reversed(list(c.db_repo_scm_instance[-self.feed_nr:])):
 
                feed.add_item(title=self._get_title(cs),
 
                              link=url('changeset_home', repo_name=repo_name,
 
                                       revision=cs.raw_id, qualified=True),
 
                              link=h.canonical_url('changeset_home', repo_name=repo_name,
 
                                       revision=cs.raw_id),
 
                              author_name=cs.author,
 
                              description=''.join(self.__get_desc(cs)),
 
                              pubdate=cs.date,
 
                             )
 

	
 
            response.content_type = feed.mime_type
 
            return feed.writeString('utf-8')
 

	
 
        kind = 'RSS'
 
        valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
 
        if not valid:
 
            region_invalidate(_get_feed_from_cache, None, repo_name, kind)
 
        return _get_feed_from_cache(repo_name, kind)
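
The substitution applied above in feed.py (and repeated in the controllers
below) is mechanical: links that leave the application switch from
url(..., qualified=True) to h.canonical_url(...), dropping qualified=True
since the helper presumably always returns an absolute URL. For example:

    # before: absolute URL built from the host of the current request
    link = url('summary_home', repo_name=repo_name, qualified=True)
    # after: absolute URL built from the configured canonical_url
    link = h.canonical_url('summary_home', repo_name=repo_name)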
kallithea/controllers/journal.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.journal
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Journal controller for pylons
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Nov 21, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 

	
 
"""
 

	
 
import logging
 
from itertools import groupby
 

	
 
from sqlalchemy import or_
 
from sqlalchemy.orm import joinedload
 
from sqlalchemy.sql.expression import func
 

	
 
from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed
 

	
 
from webob.exc import HTTPBadRequest
 
from pylons import request, tmpl_context as c, response, url
 
from pylons.i18n.translation import _
 

	
 
from kallithea.controllers.admin.admin import _journal_filter
 
from kallithea.model.db import UserLog, UserFollowing, Repository, User
 
from kallithea.model.meta import Session
 
from kallithea.model.repo import RepoModel
 
import kallithea.lib.helpers as h
 
from kallithea.lib.helpers import Page
 
from kallithea.lib.auth import LoginRequired, NotAnonymous
 
from kallithea.lib.base import BaseController, render
 
from kallithea.lib.utils2 import safe_int, AttributeDict
 
from kallithea.lib.compat import json
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class JournalController(BaseController):
 

	
 
    def __before__(self):
 
        super(JournalController, self).__before__()
 
        self.language = 'en-us'
 
        self.ttl = "5"
 
        self.feed_nr = 20
 
        c.search_term = request.GET.get('filter')
 

	
 
    def _get_daily_aggregate(self, journal):
 
        groups = []
 
        for k, g in groupby(journal, lambda x: x.action_as_day):
 
            user_group = []
 
            #groupby username if it's a present value, else fallback to journal username
 
            for _unused, g2 in groupby(list(g), lambda x: x.user.username if x.user else x.username):
 
                l = list(g2)
 
                user_group.append((l[0].user, l))
 

	
 
            groups.append((k, user_group,))
 

	
 
        return groups
 

	
 
    def _get_journal_data(self, following_repos):
 
        repo_ids = [x.follows_repository.repo_id for x in following_repos
 
                    if x.follows_repository is not None]
 
        user_ids = [x.follows_user.user_id for x in following_repos
 
                    if x.follows_user is not None]
 

	
 
        filtering_criterion = None
 

	
 
        if repo_ids and user_ids:
 
            filtering_criterion = or_(UserLog.repository_id.in_(repo_ids),
 
                        UserLog.user_id.in_(user_ids))
 
        if repo_ids and not user_ids:
 
            filtering_criterion = UserLog.repository_id.in_(repo_ids)
 
        if not repo_ids and user_ids:
 
            filtering_criterion = UserLog.user_id.in_(user_ids)
 
        if filtering_criterion is not None:
 
            journal = self.sa.query(UserLog)\
 
                .options(joinedload(UserLog.user))\
 
                .options(joinedload(UserLog.repository))
 
            #filter
 
            try:
 
                journal = _journal_filter(journal, c.search_term)
 
            except Exception:
 
                # we want this to crash for now
 
                raise
 
            journal = journal.filter(filtering_criterion)\
 
                        .order_by(UserLog.action_date.desc())
 
        else:
 
            journal = []
 

	
 
        return journal
 

	
 
    def _atom_feed(self, repos, public=True):
 
        journal = self._get_journal_data(repos)
 
        if public:
 
            _link = url('public_journal_atom', qualified=True)
 
            _link = h.canonical_url('public_journal_atom')
 
            _desc = '%s %s %s' % (c.site_name, _('public journal'),
 
                                  'atom feed')
 
        else:
 
            _link = url('journal_atom', qualified=True)
 
            _link = h.canonical_url('journal_atom')
 
            _desc = '%s %s %s' % (c.site_name, _('journal'), 'atom feed')
 

	
 
        feed = Atom1Feed(title=_desc,
 
                         link=_link,
 
                         description=_desc,
 
                         language=self.language,
 
                         ttl=self.ttl)
 

	
 
        for entry in journal[:self.feed_nr]:
 
            user = entry.user
 
            if user is None:
 
                #fix deleted users
 
                user = AttributeDict({'short_contact': entry.username,
 
                                      'email': '',
 
                                      'full_contact': ''})
 
            action, action_extra, ico = h.action_parser(entry, feed=True)
 
            title = "%s - %s %s" % (user.short_contact, action(),
 
                                    entry.repository.repo_name)
 
            desc = action_extra()
 
            _url = None
 
            if entry.repository is not None:
 
                _url = url('changelog_home',
 
                           repo_name=entry.repository.repo_name,
 
                           qualified=True)
 
                _url = h.canonical_url('changelog_home',
 
                           repo_name=entry.repository.repo_name)
 

	
 
            feed.add_item(title=title,
 
                          pubdate=entry.action_date,
 
                          link=_url or url('', qualified=True),
 
                          link=_url or h.canonical_url(''),
 
                          author_email=user.email,
 
                          author_name=user.full_contact,
 
                          description=desc)
 

	
 
        response.content_type = feed.mime_type
 
        return feed.writeString('utf-8')
 

	
 
    def _rss_feed(self, repos, public=True):
 
        journal = self._get_journal_data(repos)
 
        if public:
 
            _link = url('public_journal_atom', qualified=True)
 
            _link = h.canonical_url('public_journal_atom')
 
            _desc = '%s %s %s' % (c.site_name, _('public journal'),
 
                                  'rss feed')
 
        else:
 
            _link = url('journal_atom', qualified=True)
 
            _link = h.canonical_url('journal_atom')
 
            _desc = '%s %s %s' % (c.site_name, _('journal'), 'rss feed')
 

	
 
        feed = Rss201rev2Feed(title=_desc,
 
                         link=_link,
 
                         description=_desc,
 
                         language=self.language,
 
                         ttl=self.ttl)
 

	
 
        for entry in journal[:self.feed_nr]:
 
            user = entry.user
 
            if user is None:
 
                #fix deleted users
 
                user = AttributeDict({'short_contact': entry.username,
 
                                      'email': '',
 
                                      'full_contact': ''})
 
            action, action_extra, ico = h.action_parser(entry, feed=True)
 
            title = "%s - %s %s" % (user.short_contact, action(),
 
                                    entry.repository.repo_name)
 
            desc = action_extra()
 
            _url = None
 
            if entry.repository is not None:
 
                _url = url('changelog_home',
 
                           repo_name=entry.repository.repo_name,
 
                           qualified=True)
 
                _url = h.canonical_url('changelog_home',
 
                           repo_name=entry.repository.repo_name)
 

	
 
            feed.add_item(title=title,
 
                          pubdate=entry.action_date,
 
                          link=_url or url('', qualified=True),
 
                          link=_url or h.canonical_url(''),
 
                          author_email=user.email,
 
                          author_name=user.full_contact,
 
                          description=desc)
 

	
 
        response.content_type = feed.mime_type
 
        return feed.writeString('utf-8')
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    def index(self):
 
        # Return a rendered template
 
        p = safe_int(request.GET.get('page', 1), 1)
 
        c.user = User.get(self.authuser.user_id)
 
        c.following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 

	
 
        journal = self._get_journal_data(c.following)
 

	
 
        def url_generator(**kw):
 
            return url.current(filter=c.search_term, **kw)
 

	
 
        c.journal_pager = Page(journal, page=p, items_per_page=20, url=url_generator)
 
        c.journal_day_aggreagate = self._get_daily_aggregate(c.journal_pager)
 

	
 
        c.journal_data = render('journal/journal_data.html')
 
        if request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            return c.journal_data
 

	
 
        repos_list = Session().query(Repository)\
 
                     .filter(Repository.user_id ==
 
                             self.authuser.user_id)\
 
                     .order_by(func.lower(Repository.repo_name)).all()
 

	
 
        repos_data = RepoModel().get_repos_as_dict(repos_list=repos_list,
 
                                                   admin=True)
 
        #json used to render the grid
 
        c.data = json.dumps(repos_data)
 

	
 
        watched_repos_data = []
 

	
 
        ## watched repos
 
        _render = RepoModel._render_datatable
 

	
 
        def quick_menu(repo_name):
 
            return _render('quick_menu', repo_name)
 

	
 
        def repo_lnk(name, rtype, rstate, private, fork_of):
 
            return _render('repo_name', name, rtype, rstate, private, fork_of,
 
                           short_name=False, admin=False)
 

	
 
        def last_rev(repo_name, cs_cache):
 
            return _render('revision', repo_name, cs_cache.get('revision'),
 
                           cs_cache.get('raw_id'), cs_cache.get('author'),
 
                           cs_cache.get('message'))
 

	
 
        def desc(desc):
 
            from pylons import tmpl_context as c
 
            if c.visual.stylify_metatags:
 
                return h.urlify_text(h.desc_stylize(h.truncate(desc, 60)))
 
            else:
 
                return h.urlify_text(h.truncate(desc, 60))
 

	
 
        def repo_actions(repo_name):
 
            return _render('repo_actions', repo_name)
 

	
 
        def owner_actions(user_id, username):
 
            return _render('user_name', user_id, username)
 

	
 
        def toogle_follow(repo_id):
 
            return  _render('toggle_follow', repo_id)
 

	
 
        for entry in c.following:
 
            repo = entry.follows_repository
 
            cs_cache = repo.changeset_cache
 
            row = {
 
                "menu": quick_menu(repo.repo_name),
 
                "raw_name": repo.repo_name.lower(),
 
                "name": repo_lnk(repo.repo_name, repo.repo_type,
 
                                 repo.repo_state, repo.private, repo.fork),
 
                "last_changeset": last_rev(repo.repo_name, cs_cache),
 
                "last_rev_raw": cs_cache.get('revision'),
 
                "action": toogle_follow(repo.repo_id)
 
            }
 

	
 
            watched_repos_data.append(row)
 

	
 
        c.watched_data = json.dumps({
 
            "totalRecords": len(c.following),
 
            "startIndex": 0,
 
            "sort": "name",
 
            "dir": "asc",
 
            "records": watched_repos_data
 
        })
 
        return render('journal/journal.html')
 

	
 
    @LoginRequired(api_access=True)
 
    @NotAnonymous()
 
    def journal_atom(self):
 
        """
 
        Produce an atom-1.0 feed via feedgenerator module
 
        """
 
        following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 
        return self._atom_feed(following, public=False)
 

	
 
    @LoginRequired(api_access=True)
 
    @NotAnonymous()
 
    def journal_rss(self):
 
        """
 
        Produce an rss feed via feedgenerator module
 
        """
 
        following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 
        return self._rss_feed(following, public=False)
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    def toggle_following(self):
 
        cur_token = request.POST.get('auth_token')
 
        token = h.get_token()
 
        if cur_token == token:
 

	
 
            user_id = request.POST.get('follows_user_id')
 
            if user_id:
 
                try:
 
                    self.scm_model.toggle_following_user(user_id,
 
                                                self.authuser.user_id)
 
                    Session.commit()
 
                    return 'ok'
 
                except Exception:
 
                    raise HTTPBadRequest()
 

	
 
            repo_id = request.POST.get('follows_repo_id')
 
            if repo_id:
 
                try:
 
                    self.scm_model.toggle_following_repo(repo_id,
 
                                                self.authuser.user_id)
 
                    Session.commit()
 
                    return 'ok'
 
                except Exception:
 
                    raise HTTPBadRequest()
 

	
 
        log.debug('token mismatch %s vs %s' % (cur_token, token))
 
        raise HTTPBadRequest()
 

	
 
    @LoginRequired()
 
    def public_journal(self):
 
        # Return a rendered template
 
        p = safe_int(request.GET.get('page', 1), 1)
 

	
 
        c.following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 

	
 
        journal = self._get_journal_data(c.following)
 

	
 
        c.journal_pager = Page(journal, page=p, items_per_page=20)
 

	
 
        c.journal_day_aggreagate = self._get_daily_aggregate(c.journal_pager)
 

	
 
        c.journal_data = render('journal/journal_data.html')
 
        if request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            return c.journal_data
 
        return render('journal/public_journal.html')
 

	
 
    @LoginRequired(api_access=True)
 
    def public_journal_atom(self):
 
        """
 
        Produce an atom-1.0 feed via feedgenerator module
 
        """
 
        c.following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 

	
 
        return self._atom_feed(c.following)
 

	
 
    @LoginRequired(api_access=True)
 
    def public_journal_rss(self):
 
        """
 
        Produce an rss2 feed via feedgenerator module
 
        """
 
        c.following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
kallithea/controllers/pullrequests.py
 
@@ -308,450 +308,449 @@ class PullrequestsController(BaseRepoCon
 

	
 
        return render('/pullrequests/pullrequest_show_all.html')
 

	
 
    @LoginRequired()
 
    def show_my(self): # my_account_my_pullrequests
 
        c.show_closed = request.GET.get('pr_show_closed')
 
        return render('/pullrequests/pullrequest_show_my.html')
 

	
 
    @NotAnonymous()
 
    def show_my_data(self):
 
        c.show_closed = request.GET.get('pr_show_closed')
 

	
 
        def _filter(pr):
 
            s = sorted(pr, key=lambda o: o.created_on, reverse=True)
 
            if not c.show_closed:
 
                s = filter(lambda p: p.status != PullRequest.STATUS_CLOSED, s)
 
            return s
 

	
 
        c.my_pull_requests = _filter(PullRequest.query()\
 
                                .filter(PullRequest.user_id ==
 
                                        self.authuser.user_id)\
 
                                .all())
 

	
 
        c.participate_in_pull_requests = _filter(PullRequest.query()\
 
                                .join(PullRequestReviewers)\
 
                                .filter(PullRequestReviewers.user_id ==
 
                                        self.authuser.user_id)\
 
                                                 )
 

	
 
        return render('/pullrequests/pullrequest_show_my_data.html')
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def index(self):
 
        org_repo = c.db_repo
 

	
 
        try:
 
            org_repo.scm_instance.get_changeset()
 
        except EmptyRepositoryError, e:
 
            h.flash(h.literal(_('There are no changesets yet')),
 
                    category='warning')
 
            redirect(url('summary_home', repo_name=org_repo.repo_name))
 

	
 
        org_rev = request.GET.get('rev_end')
 
        # rev_start is not directly useful - its parent could however be used
 
        # as default for other and thus give a simple compare view
 
        #other_rev = request.POST.get('rev_start')
 
        branch = request.GET.get('branch')
 

	
 
        c.org_repos = []
 
        c.org_repos.append((org_repo.repo_name, org_repo.repo_name))
 
        c.default_org_repo = org_repo.repo_name
 
        c.org_refs, c.default_org_ref = self._get_repo_refs(org_repo.scm_instance, rev=org_rev, branch=branch)
 

	
 
        c.other_repos = []
 
        other_repos_info = {}
 

	
 
        def add_other_repo(repo, branch_rev=None):
 
            if repo.repo_name in other_repos_info: # shouldn't happen
 
                return
 
            c.other_repos.append((repo.repo_name, repo.repo_name))
 
            other_refs, selected_other_ref = self._get_repo_refs(repo.scm_instance, branch_rev=branch_rev)
 
            other_repos_info[repo.repo_name] = {
 
                'user': dict(user_id=repo.user.user_id,
 
                             username=repo.user.username,
 
                             firstname=repo.user.firstname,
 
                             lastname=repo.user.lastname,
 
                             gravatar_link=h.gravatar_url(repo.user.email, 14)),
 
                'description': repo.description.split('\n', 1)[0],
 
                'revs': h.select('other_ref', selected_other_ref, other_refs, class_='refs')
 
            }
 

	
 
        # add org repo to other so we can open pull request against peer branches on itself
 
        add_other_repo(org_repo, branch_rev=org_rev)
 
        c.default_other_repo = org_repo.repo_name
 

	
 
        # gather forks and add to this list ... even though it is rare to
 
        # request forks to pull from their parent
 
        for fork in org_repo.forks:
 
            add_other_repo(fork)
 

	
 
        # add parents of this fork also, but only if it's not empty
 
        if org_repo.parent and org_repo.parent.scm_instance.revisions:
 
            add_other_repo(org_repo.parent)
 
            c.default_other_repo = org_repo.parent.repo_name
 

	
 
        c.default_other_repo_info = other_repos_info[c.default_other_repo]
 
        c.other_repos_info = json.dumps(other_repos_info)
 

	
 
        return render('/pullrequests/pullrequest.html')
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def create(self, repo_name):
 
        repo = RepoModel()._get_repo(repo_name)
 
        try:
 
            _form = PullRequestForm(repo.repo_id)().to_python(request.POST)
 
        except formencode.Invalid, errors:
 
            log.error(traceback.format_exc())
 
            log.error(str(errors))
 
            msg = _('Error creating pull request: %s') % errors.msg
 
            h.flash(msg, 'error')
 
            raise HTTPBadRequest
 

	
 
        # heads up: org and other might seem backward here ...
 
        org_repo_name = _form['org_repo']
 
        org_ref = _form['org_ref'] # will have merge_rev as rev but symbolic name
 
        org_repo = RepoModel()._get_repo(org_repo_name)
 
        (org_ref_type,
 
         org_ref_name,
 
         org_rev) = org_ref.split(':')
 

	
 
        other_repo_name = _form['other_repo']
 
        other_ref = _form['other_ref'] # will have symbolic name and head revision
 
        other_repo = RepoModel()._get_repo(other_repo_name)
 
        (other_ref_type,
 
         other_ref_name,
 
         other_rev) = other_ref.split(':')
 

	
 
        cs_ranges, _cs_ranges_not, ancestor_rev = \
 
            CompareController._get_changesets(org_repo.scm_instance.alias,
 
                                              other_repo.scm_instance, other_rev, # org and other "swapped"
 
                                              org_repo.scm_instance, org_rev,
 
                                              )
 
        revisions = [cs.raw_id for cs in cs_ranges]
 

	
 
        # hack: ancestor_rev is not an other_rev but we want to show the
 
        # requested destination and have the exact ancestor
 
        other_ref = '%s:%s:%s' % (other_ref_type, other_ref_name, ancestor_rev)
 

	
 
        reviewers = _form['review_members']
 

	
 
        title = _form['pullrequest_title']
 
        if not title:
 
            title = '%s#%s to %s#%s' % (org_repo_name, h.short_ref(org_ref_type, org_ref_name),
 
                                        other_repo_name, h.short_ref(other_ref_type, other_ref_name))
 
        description = _form['pullrequest_desc'].strip() or _('No description')
 
        try:
 
            pull_request = PullRequestModel().create(
 
                self.authuser.user_id, org_repo_name, org_ref, other_repo_name,
 
                other_ref, revisions, reviewers, title, description
 
            )
 
            Session().commit()
 
            h.flash(_('Successfully opened new pull request'),
 
                    category='success')
 
        except Exception:
 
            h.flash(_('Error occurred while creating pull request'),
 
                    category='error')
 
            log.error(traceback.format_exc())
 
            return redirect(url('pullrequest_home', repo_name=repo_name))
 

	
 
        return redirect(url('pullrequest_show', repo_name=other_repo_name,
 
                            pull_request_id=pull_request.pull_request_id))
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def copy_update(self, repo_name, pull_request_id):
 
        old_pull_request = PullRequest.get_or_404(pull_request_id)
 
        assert old_pull_request.other_repo.repo_name == repo_name
 
        if old_pull_request.is_closed():
 
            raise HTTPForbidden()
 

	
 
        org_repo = RepoModel()._get_repo(old_pull_request.org_repo.repo_name)
 
        org_ref_type, org_ref_name, org_rev = old_pull_request.org_ref.split(':')
 
        updaterev = request.POST.get('updaterev')
 
        if updaterev:
 
            new_org_rev = self._get_ref_rev(org_repo, 'rev', updaterev)
 
        else:
 
            # assert org_ref_type == 'branch', org_ref_type # TODO: what if not?
 
            new_org_rev = self._get_ref_rev(org_repo, org_ref_type, org_ref_name)
 

	
 
        other_repo = RepoModel()._get_repo(old_pull_request.other_repo.repo_name)
 
        other_ref_type, other_ref_name, other_rev = old_pull_request.other_ref.split(':') # other_rev is ancestor
 
        #assert other_ref_type == 'branch', other_ref_type # TODO: what if not?
 
        new_other_rev = self._get_ref_rev(other_repo, other_ref_type, other_ref_name)
 

	
 
        cs_ranges, _cs_ranges_not, ancestor_rev = CompareController._get_changesets(org_repo.scm_instance.alias,
 
            other_repo.scm_instance, new_other_rev, # org and other "swapped"
 
            org_repo.scm_instance, new_org_rev)
 

	
 
        old_revisions = set(old_pull_request.revisions)
 
        revisions = [cs.raw_id for cs in cs_ranges]
 
        new_revisions = [r for r in revisions if r not in old_revisions]
 
        lost = old_revisions.difference(revisions)
 

	
 
        infos = ['','', 'This is an update of %s "%s".' %
 
                 (url('pullrequest_show', repo_name=old_pull_request.other_repo.repo_name,
 
                      pull_request_id=pull_request_id, qualified=True),
 
                 (h.canonical_url('pullrequest_show', repo_name=old_pull_request.other_repo.repo_name,
 
                      pull_request_id=pull_request_id),
 
                  old_pull_request.title)]
 

	
 
        if lost:
 
            infos.append(_('Missing changesets since the previous version:'))
 
            for r in old_pull_request.revisions:
 
                if r in lost:
 
                    desc = org_repo.get_changeset(r).message.split('\n')[0]
 
                    infos.append('  %s "%s"' % (h.short_id(r), desc))
 

	
 
        if new_revisions:
 
            infos.append(_('New changesets on %s %s since the previous version:') % (org_ref_type, org_ref_name))
 
            for r in reversed(revisions):
 
                if r in new_revisions:
 
                    desc = org_repo.get_changeset(r).message.split('\n')[0]
 
                    infos.append('  %s "%s"' % (h.short_id(r), desc))
 

	
 
            if ancestor_rev == other_rev:
 
                infos.append(_("Ancestor didn't change - show diff since previous version: %s .") %
 
                             url('compare_url',
 
                             h.canonical_url('compare_url',
 
                                 repo_name=org_repo.repo_name, # other_repo is always same as repo_name
 
                                 org_ref_type='rev', org_ref_name=h.short_id(org_rev), # use old org_rev as base
 
                                 other_ref_type='rev', other_ref_name=h.short_id(new_org_rev),
 
                                 qualified=True)) # note: linear diff, merge or not doesn't matter
 
                                 )) # note: linear diff, merge or not doesn't matter
 
            else:
 
                infos.append(_('This pull request uses another merge ancestor than the previous version and they are not directly comparable.'))
 
        else:
 
           infos.append(_('No changes found on %s %s since previous version.') % (org_ref_type, org_ref_name))
 
           # TODO: fail?
 

	
 
        # hack: ancestor_rev is not an other_ref but we want to show the
 
        # requested destination and have the exact ancestor
 
        new_other_ref = '%s:%s:%s' % (other_ref_type, other_ref_name, ancestor_rev)
 
        new_org_ref = '%s:%s:%s' % (org_ref_type, org_ref_name, new_org_rev)
 

	
 
        reviewers = [r.user_id for r in old_pull_request.reviewers]
 
        try:
 
            old_title, old_v = re.match(r'(.*)\(v(\d+)\)\s*$', old_pull_request.title).groups()
 
            v = int(old_v) + 1
 
        except (AttributeError, ValueError):
 
            old_title = old_pull_request.title
 
            v = 2
 
        title = '%s (v%s)' % (old_title.strip(), v)
 
        description = (old_pull_request.description.rstrip() +
 
                       '\n'.join(infos))
 

	
 
        try:
 
            pull_request = PullRequestModel().create(
 
                self.authuser.user_id,
 
                old_pull_request.org_repo.repo_name, new_org_ref,
 
                old_pull_request.other_repo.repo_name, new_other_ref,
 
                revisions, reviewers, title, description
 
            )
 
        except Exception:
 
            h.flash(_('Error occurred while creating pull request'),
 
                    category='error')
 
            log.error(traceback.format_exc())
 
            return redirect(url('pullrequest_show', repo_name=repo_name,
 
                                pull_request_id=pull_request_id))
 

	
 
        ChangesetCommentsModel().create(
 
            text=_('Closed, replaced by %s .') % url('pullrequest_show',
 
            text=_('Closed, replaced by %s .') % h.canonical_url('pullrequest_show',
 
                                                   repo_name=old_pull_request.other_repo.repo_name,
 
                                                   pull_request_id=pull_request.pull_request_id,
 
                                                   qualified=True),
 
                                                   pull_request_id=pull_request.pull_request_id),
 
            repo=old_pull_request.other_repo.repo_id,
 
            user=c.authuser.user_id,
 
            pull_request=pull_request_id,
 
            closing_pr=True)
 
        PullRequestModel().close_pull_request(pull_request_id)
 

	
 
        Session().commit()
 
        h.flash(_('Pull request update created'),
 
                category='success')
 

	
 
        return redirect(url('pullrequest_show', repo_name=old_pull_request.other_repo.repo_name,
 
                            pull_request_id=pull_request.pull_request_id))
 

	
 
    # pullrequest_post for PR editing
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def post(self, repo_name, pull_request_id):
 
        repo = RepoModel()._get_repo(repo_name)
 
        pull_request = PullRequest.get_or_404(pull_request_id)
 
        old_description = pull_request.description
 

	
 
        _form = PullRequestPostForm()().to_python(request.POST)
 

	
 
        pull_request.title = _form['pullrequest_title']
 
        pull_request.description = _form['pullrequest_desc'].strip() or _('No description')
 

	
 
        PullRequestModel().mention_from_description(pull_request, old_description)
 

	
 
        Session().commit()
 
        h.flash(_('Pull request updated'), category='success')
 

	
 
        return redirect(url('pullrequest_show', repo_name=repo.repo_name,
 
                            pull_request_id=pull_request_id))
 

	
 
    # pullrequest_update for updating reviewer list
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def update(self, repo_name, pull_request_id):
 
        pull_request = PullRequest.get_or_404(pull_request_id)
 
        if pull_request.is_closed():
 
            raise HTTPForbidden()
 
        #only owner or admin can update it
 
        owner = pull_request.author.user_id == c.authuser.user_id
 
        repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name)
 
        if h.HasPermissionAny('hg.admin') or repo_admin or owner:
 
            reviewers_ids = map(int, filter(lambda v: v not in [None, ''],
 
                request.POST.get('reviewers_ids', '').split(',')))
 

	
 
            PullRequestModel().update_reviewers(pull_request_id, reviewers_ids)
 
            Session().commit()
 
            return True
 
        raise HTTPForbidden()
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def delete(self, repo_name, pull_request_id):
 
        pull_request = PullRequest.get_or_404(pull_request_id)
 
        #only owner can delete it !
 
        if pull_request.author.user_id == c.authuser.user_id:
 
            PullRequestModel().delete(pull_request)
 
            Session().commit()
 
            h.flash(_('Successfully deleted pull request'),
 
                    category='success')
 
            return redirect(url('my_account_pullrequests'))
 
        raise HTTPForbidden()
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def show(self, repo_name, pull_request_id):
 
        repo_model = RepoModel()
 
        c.users_array = repo_model.get_users_js()
 
        c.user_groups_array = repo_model.get_user_groups_js()
 
        c.pull_request = PullRequest.get_or_404(pull_request_id)
 
        c.allowed_to_change_status = self._get_is_allowed_change_status(c.pull_request)
 
        cc_model = ChangesetCommentsModel()
 
        cs_model = ChangesetStatusModel()
 

	
 
        # pull_requests repo_name we opened it against
 
        # ie. other_repo must match
 
        if repo_name != c.pull_request.other_repo.repo_name:
 
            raise HTTPNotFound
 

	
 
        # load compare data into template context
 
        self._load_compare_data(c.pull_request, enable_comments=True)
 

	
 
        # inline comments
 
        c.inline_cnt = 0
 
        c.inline_comments = cc_model.get_inline_comments(
 
                                c.db_repo.repo_id,
 
                                pull_request=pull_request_id)
 
        # count inline comments
 
        for __, lines in c.inline_comments:
 
            for comments in lines.values():
 
                c.inline_cnt += len(comments)
 
        # comments
 
        c.comments = cc_model.get_comments(c.db_repo.repo_id,
 
                                           pull_request=pull_request_id)
 

	
 
        # (badly named) pull-request status calculation based on reviewer votes
 
        (c.pull_request_reviewers,
 
         c.pull_request_pending_reviewers,
 
         c.current_voting_result,
 
         ) = cs_model.calculate_pull_request_result(c.pull_request)
 
        c.changeset_statuses = ChangesetStatus.STATUSES
 

	
 
        c.as_form = False
 
        c.ancestor = None # there is one - but right here we don't know which
 
        return render('/pullrequests/pullrequest_show.html')
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def comment(self, repo_name, pull_request_id):
 
        pull_request = PullRequest.get_or_404(pull_request_id)
 

	
 
        status = request.POST.get('changeset_status')
 
        text = request.POST.get('text')
 
        close_pr = request.POST.get('save_close')
 

	
 
        allowed_to_change_status = self._get_is_allowed_change_status(pull_request)
 
        if status and allowed_to_change_status:
 
            _def = (_('Status change -> %s')
 
                            % ChangesetStatus.get_status_lbl(status))
 
            if close_pr:
 
                _def = _('Closing with') + ' ' + _def
 
            text = text or _def
 
        comm = ChangesetCommentsModel().create(
 
            text=text,
 
            repo=c.db_repo.repo_id,
 
            user=c.authuser.user_id,
 
            pull_request=pull_request_id,
 
            f_path=request.POST.get('f_path'),
 
            line_no=request.POST.get('line'),
 
            status_change=(ChangesetStatus.get_status_lbl(status)
 
                           if status and allowed_to_change_status else None),
 
            closing_pr=close_pr
 
        )
 

	
 
        action_logger(self.authuser,
 
                      'user_commented_pull_request:%s' % pull_request_id,
 
                      c.db_repo, self.ip_addr, self.sa)
 

	
 
        if allowed_to_change_status:
 
            # get status if set !
 
            if status:
 
                ChangesetStatusModel().set_status(
 
                    c.db_repo.repo_id,
 
                    status,
 
                    c.authuser.user_id,
 
                    comm,
 
                    pull_request=pull_request_id
 
                )
 

	
 
            if close_pr:
 
                PullRequestModel().close_pull_request(pull_request_id)
 
                action_logger(self.authuser,
 
                              'user_closed_pull_request:%s' % pull_request_id,
 
                              c.db_repo, self.ip_addr, self.sa)
 

	
 
        Session().commit()
 

	
 
        if not request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            return redirect(h.url('pullrequest_show', repo_name=repo_name,
 
                                  pull_request_id=pull_request_id))
 

	
 
        data = {
 
           'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
 
        }
 
        if comm:
 
            c.co = comm
 
            data.update(comm.get_dict())
 
            data.update({'rendered_text':
 
                         render('changeset/changeset_comment_block.html')})
 

	
 
        return data
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
kallithea/lib/dbmigrate/schema/db_1_5_2.py
 
@@ -747,388 +747,388 @@ class Repository(Base, BaseModel):
 
    def normalize_repo_name(cls, repo_name):
 
        """
 
        Normalizes os specific repo_name to the format internally stored inside
 
        database using URL_SEP
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        return cls.url_sep().join(repo_name.split(os.sep))
 

	
 
    @classmethod
 
    def get_by_repo_name(cls, repo_name):
 
        q = Session().query(cls).filter(cls.repo_name == repo_name)
 
        q = q.options(joinedload(Repository.fork))\
 
                .options(joinedload(Repository.user))\
 
                .options(joinedload(Repository.group))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_full_path(cls, repo_full_path):
 
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
 
        repo_name = cls.normalize_repo_name(repo_name)
 
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
 

	
 
    @classmethod
 
    def get_repo_forks(cls, repo_id):
 
        return cls.query().filter(Repository.fork_id == repo_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns base path where all repos are stored
 

	
 
        :param cls:
 
        """
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == cls.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def forks(self):
 
        """
 
        Return forks of this repo
 
        """
 
        return Repository.get_repo_forks(self.repo_id)
 

	
 
    @property
 
    def parent(self):
 
        """
 
        Returns fork parent
 
        """
 
        return self.fork
 

	
 
    @property
 
    def just_name(self):
 
        return self.repo_name.split(Repository.url_sep())[-1]
 

	
 
    @property
 
    def groups_with_parents(self):
 
        groups = []
 
        if self.group is None:
 
            return groups
 

	
 
        cur_gr = self.group
 
        groups.insert(0, cur_gr)
 
        while 1:
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            groups.insert(0, gr)
 

	
 
        return groups
 

	
 
    @property
 
    def groups_and_repo(self):
 
        return self.groups_with_parents, self.just_name
 

	
 
    @LazyProperty
 
    def repo_path(self):
 
        """
 
        Returns base full path for that repository means where it actually
 
        exists on a filesystem
 
        """
 
        q = Session().query(Ui).filter(Ui.ui_key ==
 
                                              Repository.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def repo_full_path(self):
 
        p = [self.repo_path]
 
        # we need to split the name by / since this is how we store the
 
        # names in the database, but that eventually needs to be converted
 
        # into a valid system path
 
        p += self.repo_name.split(Repository.url_sep())
 
        return os.path.join(*p)
 

	
 
    @property
 
    def cache_keys(self):
 
        """
 
        Returns associated cache keys for that repo
 
        """
 
        return CacheInvalidation.query()\
 
            .filter(CacheInvalidation.cache_args == self.repo_name)\
 
            .order_by(CacheInvalidation.cache_key)\
 
            .all()
 

	
 
    def get_new_name(self, repo_name):
 
        """
 
        returns new full repository name based on assigned group and new repo_name
 

	
 
        :param group_name:
 
        """
 
        path_prefix = self.group.full_path_splitted if self.group else []
 
        return Repository.url_sep().join(path_prefix + [repo_name])
 

	
 
    @property
 
    def _ui(self):
 
        """
 
        Creates an db based ui object for this repository
 
        """
 
        from kallithea.lib.utils import make_ui
 
        return make_ui('db', clear_session=False)
 

	
 
    @classmethod
 
    def inject_ui(cls, repo, extras={}):
 
        from kallithea.lib.vcs.backends.hg import MercurialRepository
 
        from kallithea.lib.vcs.backends.git import GitRepository
 
        required = (MercurialRepository, GitRepository)
 
        if not isinstance(repo, required):
 
            raise Exception('repo must be instance of %s' % (','.join(required)))
 

	
 
        # inject ui extra param to log this action via push logger
 
        for k, v in extras.items():
 
            repo._repo.ui.setconfig('extras', k, v)
 

	
 
    @classmethod
 
    def is_valid(cls, repo_name):
 
        """
 
        returns True if given repo name is a valid filesystem repository
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        from kallithea.lib.utils import is_valid_repo
 

	
 
        return is_valid_repo(repo_name, cls.base_path())
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating repo api data
 

	
 
        """
 
        repo = self
 
        data = dict(
 
            repo_id=repo.repo_id,
 
            repo_name=repo.repo_name,
 
            repo_type=repo.repo_type,
 
            clone_uri=repo.clone_uri,
 
            private=repo.private,
 
            created_on=repo.created_on,
 
            description=repo.description,
 
            landing_rev=repo.landing_rev,
 
            owner=repo.user.username,
 
            fork_of=repo.fork.repo_name if repo.fork else None,
 
            enable_statistics=repo.enable_statistics,
 
            enable_locking=repo.enable_locking,
 
            enable_downloads=repo.enable_downloads,
 
            last_changeset=repo.changeset_cache
 
        )
 

	
 
        return data
 

	
 
    @classmethod
 
    def lock(cls, repo, user_id):
 
        repo.locked = [user_id, time.time()]
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def unlock(cls, repo):
 
        repo.locked = None
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @property
 
    def last_db_change(self):
 
        return self.updated_on
 

	
 
    def clone_url(self, **override):
 
        from pylons import url
 
        import kallithea.lib.helpers as h
 
        from urlparse import urlparse
 
        import urllib
 
        parsed_url = urlparse(url('home', qualified=True))
 
        parsed_url = urlparse(h.canonical_url('home'))
 
        default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
 
        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
 
        args = {
 
           'user': '',
 
           'pass': '',
 
           'scheme': parsed_url.scheme,
 
           'netloc': parsed_url.netloc,
 
           'prefix': decoded_path,
 
           'path': self.repo_name
 
        }
 

	
 
        args.update(override)
 
        return default_clone_uri % args
 

	
 
    #==========================================================================
 
    # SCM PROPERTIES
 
    #==========================================================================
 

	
 
    def get_changeset(self, rev=None):
 
        return get_changeset_safe(self.scm_instance, rev)
 

	
 
    def get_landing_changeset(self):
 
        """
 
        Returns landing changeset, or if that doesn't exist returns the tip
 
        """
 
        cs = self.get_changeset(self.landing_rev) or self.get_changeset()
 
        return cs
 

	
 
    def update_changeset_cache(self, cs_cache=None):
 
        """
 
        Update cache of last changeset for repository, keys should be::
 

	
 
            short_id
 
            raw_id
 
            revision
 
            message
 
            date
 
            author
 

	
 
        :param cs_cache:
 
        """
 
        from kallithea.lib.vcs.backends.base import BaseChangeset
 
        if cs_cache is None:
 
            cs_cache = EmptyChangeset()
 
            # use no-cache version here
 
            scm_repo = self.scm_instance_no_cache()
 
            if scm_repo:
 
                cs_cache = scm_repo.get_changeset()
 

	
 
        if isinstance(cs_cache, BaseChangeset):
 
            cs_cache = cs_cache.__json__()
 

	
 
        if (cs_cache != self.changeset_cache or not self.changeset_cache):
 
            _default = datetime.datetime.fromtimestamp(0)
 
            last_change = cs_cache.get('date') or _default
 
            log.debug('updated repo %s with new cs cache %s'
 
                      % (self.repo_name, cs_cache))
 
            self.updated_on = last_change
 
            self.changeset_cache = cs_cache
 
            Session().add(self)
 
            Session().commit()
 
        else:
 
            log.debug('Skipping repo:%s already with latest changes'
 
                      % self.repo_name)
 

	
 
    @property
 
    def tip(self):
 
        return self.get_changeset('tip')
 

	
 
    @property
 
    def author(self):
 
        return self.tip.author
 

	
 
    @property
 
    def last_change(self):
 
        return self.scm_instance.last_change
 

	
 
    def get_comments(self, revisions=None):
 
        """
 
        Returns comments for this repository grouped by revisions
 

	
 
        :param revisions: filter query by revisions only
 
        """
 
        cmts = ChangesetComment.query()\
 
            .filter(ChangesetComment.repo == self)
 
        if revisions:
 
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
 
        grouped = defaultdict(list)
 
        for cmt in cmts.all():
 
            grouped[cmt.revision].append(cmt)
 
        return grouped
 

	
 
    def statuses(self, revisions=None):
 
        """
 
        Returns statuses for this repository
 

	
 
        :param revisions: list of revisions to get statuses for
 
        :type revisions: list
 
        """
 

	
 
        statuses = ChangesetStatus.query()\
 
            .filter(ChangesetStatus.repo == self)\
 
            .filter(ChangesetStatus.version == 0)
 
        if revisions:
 
            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
 
        grouped = {}
 

	
 
        # maybe we have an open pull request without a status yet?
 
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
 
        status_lbl = ChangesetStatus.get_status_lbl(stat)
 
        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
 
            for rev in pr.revisions:
 
                pr_id = pr.pull_request_id
 
                pr_repo = pr.other_repo.repo_name
 
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
 

	
 
        for stat in statuses.all():
 
            pr_id = pr_repo = None
 
            if stat.pull_request:
 
                pr_id = stat.pull_request.pull_request_id
 
                pr_repo = stat.pull_request.other_repo.repo_name
 
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
 
                                      pr_id, pr_repo]
 
        return grouped
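
(Illustration, not part of the changeset: the mapping returned by statuses()
associates each revision with [status, status label, pull request id, other
repo name]; the revisions, labels, id and repo name below are invented.)

grouped = {
    'a1b2c3d4e5f6': [u'approved', u'Approved', None, None],                       # status set directly on the changeset
    'f6e5d4c3b2a1': [u'under_review', u'Under Review', 7, u'group/other-repo'],   # implied by an open pull request
}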
 

	
 
    #==========================================================================
 
    # SCM CACHE INSTANCE
 
    #==========================================================================
 

	
 
    @property
 
    def invalidate(self):
 
        return CacheInvalidation.invalidate(self.repo_name)
 

	
 
    def set_invalidate(self):
 
        """
 
        Mark caches of this repo as invalid.
 
        """
 
        CacheInvalidation.set_invalidate(repo_name=self.repo_name)
 

	
 
    def scm_instance_no_cache(self):
 
        return self.__get_instance()
 

	
 
    @LazyProperty
 
    def scm_instance(self):
 
        import kallithea
 
        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
 
        if full_cache:
 
            return self.scm_instance_cached()
 
        return self.__get_instance()
 

	
 
    def scm_instance_cached(self, cache_map=None):
 
        @cache_region('long_term')
 
        def _c(repo_name):
 
            return self.__get_instance()
 
        rn = self.repo_name
 
        log.debug('Getting cached instance of repo')
 

	
 
        if cache_map:
 
            # get using prefilled cache_map
 
            invalidate_repo = cache_map[self.repo_name]
 
            if invalidate_repo:
 
                invalidate_repo = (None if invalidate_repo.cache_active
 
                                   else invalidate_repo)
 
        else:
 
            # get from invalidate
 
            invalidate_repo = self.invalidate
 

	
 
        if invalidate_repo is not None:
 
            region_invalidate(_c, None, rn)
 
            # update our cache
 
            CacheInvalidation.set_valid(invalidate_repo.cache_key)
 
        return _c(rn)
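
(Illustration, not part of the changeset: a standalone sketch of the beaker
cache_region / region_invalidate pattern used above. The in-memory 'long_term'
region is configured inline only so the sketch runs by itself; Kallithea
normally gets this from its .ini file, and _get() is a stand-in for
__get_instance().)

from beaker.cache import CacheManager, cache_region, region_invalidate
from beaker.util import parse_cache_config_options

CacheManager(**parse_cache_config_options({
    'cache.regions': 'long_term',
    'cache.long_term.type': 'memory',
    'cache.long_term.expire': '3600',
}))

@cache_region('long_term')
def _get(repo_name):
    return object()   # stand-in for the expensive __get_instance() call

first = _get('group/myrepo')                    # populates the cache
region_invalidate(_get, None, 'group/myrepo')   # drops the cached entry
second = _get('group/myrepo')                   # rebuilt: first is not second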
 

	
 
    def __get_instance(self):
 
        repo_full_path = self.repo_full_path
 
        try:
 
            alias = get_scm(repo_full_path)[0]
 
            log.debug('Creating instance of %s repository' % alias)
 
            backend = get_backend(alias)
 
        except VCSError:
 
            log.error(traceback.format_exc())
 
            log.error('Perhaps this repository is in the db but not on the '
 
                      'filesystem; run "rescan repositories" with the '
 
                      '"destroy old data" option from the admin panel')
 
            return
 

	
 
        if alias == 'hg':
 

	
 
            repo = backend(safe_str(repo_full_path), create=False,
 
                           baseui=self._ui)
 
            # skip hidden web repository
 
            if repo._get_hidden():
kallithea/lib/dbmigrate/schema/db_1_6_0.py
Show inline comments
 
@@ -817,388 +817,388 @@ class Repository(Base, BaseModel):
 
        """
 
        Normalizes os specific repo_name to the format internally stored inside
 
        database using URL_SEP
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        return cls.url_sep().join(repo_name.split(os.sep))
 

	
 
    @classmethod
 
    def get_by_repo_name(cls, repo_name):
 
        q = Session().query(cls).filter(cls.repo_name == repo_name)
 
        q = q.options(joinedload(Repository.fork))\
 
                .options(joinedload(Repository.user))\
 
                .options(joinedload(Repository.group))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_full_path(cls, repo_full_path):
 
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
 
        repo_name = cls.normalize_repo_name(repo_name)
 
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
 

	
 
    @classmethod
 
    def get_repo_forks(cls, repo_id):
 
        return cls.query().filter(Repository.fork_id == repo_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns base path where all repos are stored
 

	
 
        :param cls:
 
        """
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == cls.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def forks(self):
 
        """
 
        Return forks of this repo
 
        """
 
        return Repository.get_repo_forks(self.repo_id)
 

	
 
    @property
 
    def parent(self):
 
        """
 
        Returns fork parent
 
        """
 
        return self.fork
 

	
 
    @property
 
    def just_name(self):
 
        return self.repo_name.split(Repository.url_sep())[-1]
 

	
 
    @property
 
    def groups_with_parents(self):
 
        groups = []
 
        if self.group is None:
 
            return groups
 

	
 
        cur_gr = self.group
 
        groups.insert(0, cur_gr)
 
        while 1:
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            groups.insert(0, gr)
 

	
 
        return groups
 

	
 
    @property
 
    def groups_and_repo(self):
 
        return self.groups_with_parents, self.just_name, self.repo_name
 

	
 
    @LazyProperty
 
    def repo_path(self):
 
        """
 
        Returns the full base path of this repository, i.e. where it actually
 
        exists on a filesystem
 
        """
 
        q = Session().query(Ui).filter(Ui.ui_key ==
 
                                              Repository.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def repo_full_path(self):
 
        p = [self.repo_path]
 
        # we need to split the name by / since this is how we store the
 
        # names in the database, but that eventually needs to be converted
 
        # into a valid system path
 
        p += self.repo_name.split(Repository.url_sep())
 
        return os.path.join(*map(safe_unicode, p))
 

	
 
    @property
 
    def cache_keys(self):
 
        """
 
        Returns associated cache keys for that repo
 
        """
 
        return CacheInvalidation.query()\
 
            .filter(CacheInvalidation.cache_args == self.repo_name)\
 
            .order_by(CacheInvalidation.cache_key)\
 
            .all()
 

	
 
    def get_new_name(self, repo_name):
 
        """
 
        returns new full repository name based on assigned group and new repo_name
 

	
 
        :param repo_name:
 
        """
 
        path_prefix = self.group.full_path_splitted if self.group else []
 
        return Repository.url_sep().join(path_prefix + [repo_name])
 

	
 
    @property
 
    def _ui(self):
 
        """
 
        Creates a db-based ui object for this repository
 
        """
 
        from kallithea.lib.utils import make_ui
 
        return make_ui('db', clear_session=False)
 

	
 
    @classmethod
 
    def is_valid(cls, repo_name):
 
        """
 
        returns True if given repo name is a valid filesystem repository
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        from kallithea.lib.utils import is_valid_repo
 

	
 
        return is_valid_repo(repo_name, cls.base_path())
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating repo api data
 

	
 
        """
 
        repo = self
 
        data = dict(
 
            repo_id=repo.repo_id,
 
            repo_name=repo.repo_name,
 
            repo_type=repo.repo_type,
 
            clone_uri=repo.clone_uri,
 
            private=repo.private,
 
            created_on=repo.created_on,
 
            description=repo.description,
 
            landing_rev=repo.landing_rev,
 
            owner=repo.user.username,
 
            fork_of=repo.fork.repo_name if repo.fork else None,
 
            enable_statistics=repo.enable_statistics,
 
            enable_locking=repo.enable_locking,
 
            enable_downloads=repo.enable_downloads,
 
            last_changeset=repo.changeset_cache,
 
            locked_by=User.get(self.locked[0]).get_api_data() \
 
                if self.locked[0] else None,
 
            locked_date=time_to_datetime(self.locked[1]) \
 
                if self.locked[1] else None
 
        )
 
        rc_config = Setting.get_app_settings()
 
        repository_fields = str2bool(rc_config.get('repository_fields'))
 
        if repository_fields:
 
            for f in self.extra_fields:
 
                data[f.field_key_prefixed] = f.field_value
 

	
 
        return data
 

	
 
    @classmethod
 
    def lock(cls, repo, user_id):
 
        repo.locked = [user_id, time.time()]
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def unlock(cls, repo):
 
        repo.locked = None
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def getlock(cls, repo):
 
        return repo.locked
 

	
 
    @property
 
    def last_db_change(self):
 
        return self.updated_on
 

	
 
    def clone_url(self, **override):
 
        from pylons import url
 
        import kallithea.lib.helpers as h
 
        from urlparse import urlparse
 
        import urllib
 
        parsed_url = urlparse(url('home', qualified=True))
 
        parsed_url = urlparse(h.canonical_url('home'))
 
        default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
 
        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
 
        args = {
 
           'user': '',
 
           'pass': '',
 
           'scheme': parsed_url.scheme,
 
           'netloc': parsed_url.netloc,
 
           'prefix': decoded_path,
 
           'path': self.repo_name
 
        }
 

	
 
        args.update(override)
 
        return default_clone_uri % args
 

	
 
    #==========================================================================
 
    # SCM PROPERTIES
 
    #==========================================================================
 

	
 
    def get_changeset(self, rev=None):
 
        return get_changeset_safe(self.scm_instance, rev)
 

	
 
    def get_landing_changeset(self):
 
        """
 
        Returns landing changeset, or if that doesn't exist returns the tip
 
        """
 
        cs = self.get_changeset(self.landing_rev) or self.get_changeset()
 
        return cs
 

	
 
    def update_changeset_cache(self, cs_cache=None):
 
        """
 
        Update cache of last changeset for repository, keys should be::
 

	
 
            short_id
 
            raw_id
 
            revision
 
            message
 
            date
 
            author
 

	
 
        :param cs_cache:
 
        """
 
        from kallithea.lib.vcs.backends.base import BaseChangeset
 
        if cs_cache is None:
 
            cs_cache = EmptyChangeset()
 
            # use no-cache version here
 
            scm_repo = self.scm_instance_no_cache()
 
            if scm_repo:
 
                cs_cache = scm_repo.get_changeset()
 

	
 
        if isinstance(cs_cache, BaseChangeset):
 
            cs_cache = cs_cache.__json__()
 

	
 
        if (cs_cache != self.changeset_cache or not self.changeset_cache):
 
            _default = datetime.datetime.fromtimestamp(0)
 
            last_change = cs_cache.get('date') or _default
 
            log.debug('updated repo %s with new cs cache %s'
 
                      % (self.repo_name, cs_cache))
 
            self.updated_on = last_change
 
            self.changeset_cache = cs_cache
 
            Session().add(self)
 
            Session().commit()
 
        else:
 
            log.debug('Skipping repo:%s already with latest changes'
 
                      % self.repo_name)
 

	
 
    @property
 
    def tip(self):
 
        return self.get_changeset('tip')
 

	
 
    @property
 
    def author(self):
 
        return self.tip.author
 

	
 
    @property
 
    def last_change(self):
 
        return self.scm_instance.last_change
 

	
 
    def get_comments(self, revisions=None):
 
        """
 
        Returns comments for this repository grouped by revisions
 

	
 
        :param revisions: filter query by revisions only
 
        """
 
        cmts = ChangesetComment.query()\
 
            .filter(ChangesetComment.repo == self)
 
        if revisions:
 
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
 
        grouped = defaultdict(list)
 
        for cmt in cmts.all():
 
            grouped[cmt.revision].append(cmt)
 
        return grouped
 

	
 
    def statuses(self, revisions=None):
 
        """
 
        Returns statuses for this repository
 

	
 
        :param revisions: list of revisions to get statuses for
 
        :type revisions: list
 
        """
 

	
 
        statuses = ChangesetStatus.query()\
 
            .filter(ChangesetStatus.repo == self)\
 
            .filter(ChangesetStatus.version == 0)
 
        if revisions:
 
            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
 
        grouped = {}
 

	
 
        # maybe we have an open pull request without a status yet?
 
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
 
        status_lbl = ChangesetStatus.get_status_lbl(stat)
 
        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
 
            for rev in pr.revisions:
 
                pr_id = pr.pull_request_id
 
                pr_repo = pr.other_repo.repo_name
 
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
 

	
 
        for stat in statuses.all():
 
            pr_id = pr_repo = None
 
            if stat.pull_request:
 
                pr_id = stat.pull_request.pull_request_id
 
                pr_repo = stat.pull_request.other_repo.repo_name
 
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
 
                                      pr_id, pr_repo]
 
        return grouped
 

	
 
    def _repo_size(self):
 
        from kallithea.lib import helpers as h
 
        log.debug('calculating repository size...')
 
        return h.format_byte_size(self.scm_instance.size)
 

	
 
    #==========================================================================
 
    # SCM CACHE INSTANCE
 
    #==========================================================================
 

	
 
    @property
 
    def invalidate(self):
 
        return CacheInvalidation.invalidate(self.repo_name)
 

	
 
    def set_invalidate(self):
 
        """
 
        set a cache for invalidation for this instance
 
        """
 
        CacheInvalidation.set_invalidate(repo_name=self.repo_name)
 

	
 
    def scm_instance_no_cache(self):
 
        return self.__get_instance()
 

	
 
    @LazyProperty
 
    def scm_instance(self):
 
        import kallithea
 
        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
 
        if full_cache:
 
            return self.scm_instance_cached()
 
        return self.__get_instance()
 

	
 
    def scm_instance_cached(self, cache_map=None):
 
        @cache_region('long_term')
 
        def _c(repo_name):
 
            return self.__get_instance()
 
        rn = self.repo_name
 
        log.debug('Getting cached instance of repo')
 

	
 
        if cache_map:
 
            # get using prefilled cache_map
 
            invalidate_repo = cache_map[self.repo_name]
 
            if invalidate_repo:
 
                invalidate_repo = (None if invalidate_repo.cache_active
 
                                   else invalidate_repo)
 
        else:
 
            # get from invalidate
 
            invalidate_repo = self.invalidate
 

	
 
        if invalidate_repo is not None:
 
            region_invalidate(_c, None, rn)
 
            # update our cache
 
            CacheInvalidation.set_valid(invalidate_repo.cache_key)
 
        return _c(rn)
 

	
 
    def __get_instance(self):
 
        repo_full_path = self.repo_full_path
 
        try:
 
            alias = get_scm(repo_full_path)[0]
 
            log.debug('Creating instance of %s repository from %s'
 
                      % (alias, repo_full_path))
 
            backend = get_backend(alias)
 
        except VCSError:
 
            log.error(traceback.format_exc())
 
            log.error('Perhaps this repository is in the db but not on the '
 
                      'filesystem; run "rescan repositories" with the '
 
                      '"destroy old data" option from the admin panel')
 
            return
 

	
kallithea/lib/dbmigrate/schema/db_1_7_0.py
Show inline comments
 
@@ -848,388 +848,388 @@ class Repository(Base, BaseModel):
 
        database using URL_SEP
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        return cls.url_sep().join(repo_name.split(os.sep))
 

	
 
    @classmethod
 
    def get_by_repo_name(cls, repo_name):
 
        q = Session().query(cls).filter(cls.repo_name == repo_name)
 
        q = q.options(joinedload(Repository.fork))\
 
                .options(joinedload(Repository.user))\
 
                .options(joinedload(Repository.group))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_full_path(cls, repo_full_path):
 
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
 
        repo_name = cls.normalize_repo_name(repo_name)
 
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
 

	
 
    @classmethod
 
    def get_repo_forks(cls, repo_id):
 
        return cls.query().filter(Repository.fork_id == repo_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns base path where all repos are stored
 

	
 
        :param cls:
 
        """
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == cls.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def forks(self):
 
        """
 
        Return forks of this repo
 
        """
 
        return Repository.get_repo_forks(self.repo_id)
 

	
 
    @property
 
    def parent(self):
 
        """
 
        Returns fork parent
 
        """
 
        return self.fork
 

	
 
    @property
 
    def just_name(self):
 
        return self.repo_name.split(Repository.url_sep())[-1]
 

	
 
    @property
 
    def groups_with_parents(self):
 
        groups = []
 
        if self.group is None:
 
            return groups
 

	
 
        cur_gr = self.group
 
        groups.insert(0, cur_gr)
 
        while 1:
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            groups.insert(0, gr)
 

	
 
        return groups
 

	
 
    @property
 
    def groups_and_repo(self):
 
        return self.groups_with_parents, self.just_name, self.repo_name
 

	
 
    @LazyProperty
 
    def repo_path(self):
 
        """
 
        Returns the full base path of this repository, i.e. where it actually
 
        exists on a filesystem
 
        """
 
        q = Session().query(Ui).filter(Ui.ui_key ==
 
                                              Repository.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def repo_full_path(self):
 
        p = [self.repo_path]
 
        # we need to split the name by / since this is how we store the
 
        # names in the database, but that eventually needs to be converted
 
        # into a valid system path
 
        p += self.repo_name.split(Repository.url_sep())
 
        return os.path.join(*map(safe_unicode, p))
 

	
 
    @property
 
    def cache_keys(self):
 
        """
 
        Returns associated cache keys for that repo
 
        """
 
        return CacheInvalidation.query()\
 
            .filter(CacheInvalidation.cache_args == self.repo_name)\
 
            .order_by(CacheInvalidation.cache_key)\
 
            .all()
 

	
 
    def get_new_name(self, repo_name):
 
        """
 
        returns new full repository name based on assigned group and new repo_name
 

	
 
        :param repo_name:
 
        """
 
        path_prefix = self.group.full_path_splitted if self.group else []
 
        return Repository.url_sep().join(path_prefix + [repo_name])
 

	
 
    @property
 
    def _ui(self):
 
        """
 
        Creates a db-based ui object for this repository
 
        """
 
        from kallithea.lib.utils import make_ui
 
        return make_ui('db', clear_session=False)
 

	
 
    @classmethod
 
    def is_valid(cls, repo_name):
 
        """
 
        returns True if given repo name is a valid filesystem repository
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        from kallithea.lib.utils import is_valid_repo
 

	
 
        return is_valid_repo(repo_name, cls.base_path())
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating repo api data
 

	
 
        """
 
        repo = self
 
        data = dict(
 
            repo_id=repo.repo_id,
 
            repo_name=repo.repo_name,
 
            repo_type=repo.repo_type,
 
            clone_uri=repo.clone_uri,
 
            private=repo.private,
 
            created_on=repo.created_on,
 
            description=repo.description,
 
            landing_rev=repo.landing_rev,
 
            owner=repo.user.username,
 
            fork_of=repo.fork.repo_name if repo.fork else None,
 
            enable_statistics=repo.enable_statistics,
 
            enable_locking=repo.enable_locking,
 
            enable_downloads=repo.enable_downloads,
 
            last_changeset=repo.changeset_cache,
 
            locked_by=User.get(self.locked[0]).get_api_data() \
 
                if self.locked[0] else None,
 
            locked_date=time_to_datetime(self.locked[1]) \
 
                if self.locked[1] else None
 
        )
 
        rc_config = Setting.get_app_settings()
 
        repository_fields = str2bool(rc_config.get('repository_fields'))
 
        if repository_fields:
 
            for f in self.extra_fields:
 
                data[f.field_key_prefixed] = f.field_value
 

	
 
        return data
 

	
 
    @classmethod
 
    def lock(cls, repo, user_id, lock_time=None):
 
        if not lock_time:
 
            lock_time = time.time()
 
        repo.locked = [user_id, lock_time]
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def unlock(cls, repo):
 
        repo.locked = None
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def getlock(cls, repo):
 
        return repo.locked
 

	
 
    @property
 
    def last_db_change(self):
 
        return self.updated_on
 

	
 
    def clone_url(self, **override):
 
        from pylons import url
 
        import kallithea.lib.helpers as h
 
        from urlparse import urlparse
 
        import urllib
 
        parsed_url = urlparse(url('home', qualified=True))
 
        parsed_url = urlparse(h.canonical_url('home'))
 
        default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
 
        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
 
        args = {
 
           'user': '',
 
           'pass': '',
 
           'scheme': parsed_url.scheme,
 
           'netloc': parsed_url.netloc,
 
           'prefix': decoded_path,
 
           'path': self.repo_name
 
        }
 

	
 
        args.update(override)
 
        return default_clone_uri % args
 

	
 
    #==========================================================================
 
    # SCM PROPERTIES
 
    #==========================================================================
 

	
 
    def get_changeset(self, rev=None):
 
        return get_changeset_safe(self.scm_instance, rev)
 

	
 
    def get_landing_changeset(self):
 
        """
 
        Returns landing changeset, or if that doesn't exist returns the tip
 
        """
 
        cs = self.get_changeset(self.landing_rev) or self.get_changeset()
 
        return cs
 

	
 
    def update_changeset_cache(self, cs_cache=None):
 
        """
 
        Update cache of last changeset for repository, keys should be::
 

	
 
            short_id
 
            raw_id
 
            revision
 
            message
 
            date
 
            author
 

	
 
        :param cs_cache:
 
        """
 
        from kallithea.lib.vcs.backends.base import BaseChangeset
 
        if cs_cache is None:
 
            cs_cache = EmptyChangeset()
 
            # use no-cache version here
 
            scm_repo = self.scm_instance_no_cache()
 
            if scm_repo:
 
                cs_cache = scm_repo.get_changeset()
 

	
 
        if isinstance(cs_cache, BaseChangeset):
 
            cs_cache = cs_cache.__json__()
 

	
 
        if (cs_cache != self.changeset_cache or not self.changeset_cache):
 
            _default = datetime.datetime.fromtimestamp(0)
 
            last_change = cs_cache.get('date') or _default
 
            log.debug('updated repo %s with new cs cache %s'
 
                      % (self.repo_name, cs_cache))
 
            self.updated_on = last_change
 
            self.changeset_cache = cs_cache
 
            Session().add(self)
 
            Session().commit()
 
        else:
 
            log.debug('Skipping repo:%s already with latest changes'
 
                      % self.repo_name)
 

	
 
    @property
 
    def tip(self):
 
        return self.get_changeset('tip')
 

	
 
    @property
 
    def author(self):
 
        return self.tip.author
 

	
 
    @property
 
    def last_change(self):
 
        return self.scm_instance.last_change
 

	
 
    def get_comments(self, revisions=None):
 
        """
 
        Returns comments for this repository grouped by revisions
 

	
 
        :param revisions: filter query by revisions only
 
        """
 
        cmts = ChangesetComment.query()\
 
            .filter(ChangesetComment.repo == self)
 
        if revisions:
 
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
 
        grouped = collections.defaultdict(list)
 
        for cmt in cmts.all():
 
            grouped[cmt.revision].append(cmt)
 
        return grouped
 

	
 
    def statuses(self, revisions=None):
 
        """
 
        Returns statuses for this repository
 

	
 
        :param revisions: list of revisions to get statuses for
 
        """
 

	
 
        statuses = ChangesetStatus.query()\
 
            .filter(ChangesetStatus.repo == self)\
 
            .filter(ChangesetStatus.version == 0)
 
        if revisions:
 
            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
 
        grouped = {}
 

	
 
        # maybe we have an open pull request without a status yet?
 
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
 
        status_lbl = ChangesetStatus.get_status_lbl(stat)
 
        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
 
            for rev in pr.revisions:
 
                pr_id = pr.pull_request_id
 
                pr_repo = pr.other_repo.repo_name
 
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
 

	
 
        for stat in statuses.all():
 
            pr_id = pr_repo = None
 
            if stat.pull_request:
 
                pr_id = stat.pull_request.pull_request_id
 
                pr_repo = stat.pull_request.other_repo.repo_name
 
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
 
                                      pr_id, pr_repo]
 
        return grouped
 

	
 
    def _repo_size(self):
 
        from kallithea.lib import helpers as h
 
        log.debug('calculating repository size...')
 
        return h.format_byte_size(self.scm_instance.size)
 

	
 
    #==========================================================================
 
    # SCM CACHE INSTANCE
 
    #==========================================================================
 

	
 
    def set_invalidate(self):
 
        """
 
        Mark caches of this repo as invalid.
 
        """
 
        CacheInvalidation.set_invalidate(self.repo_name)
 

	
 
    def scm_instance_no_cache(self):
 
        return self.__get_instance()
 

	
 
    @property
 
    def scm_instance(self):
 
        import kallithea
 
        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
 
        if full_cache:
 
            return self.scm_instance_cached()
 
        return self.__get_instance()
 

	
 
    def scm_instance_cached(self, valid_cache_keys=None):
 
        @cache_region('long_term')
 
        def _c(repo_name):
 
            return self.__get_instance()
 
        rn = self.repo_name
 

	
 
        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
 
        if not valid:
 
            log.debug('Cache for %s invalidated, getting new object' % (rn))
 
            region_invalidate(_c, None, rn)
 
        else:
 
            log.debug('Getting obj for %s from cache' % (rn))
 
        return _c(rn)
 

	
 
    def __get_instance(self):
 
        repo_full_path = self.repo_full_path
 
        try:
 
            alias = get_scm(repo_full_path)[0]
 
            log.debug('Creating instance of %s repository from %s'
 
                      % (alias, repo_full_path))
 
            backend = get_backend(alias)
 
        except VCSError:
 
            log.error(traceback.format_exc())
 
            log.error('Perhaps this repository is in the db but not on the '
 
                      'filesystem; run "rescan repositories" with the '
 
                      '"destroy old data" option from the admin panel')
 
            return
 

	
 
        if alias == 'hg':
 

	
 
            repo = backend(safe_str(repo_full_path), create=False,
 
                           baseui=self._ui)
 
            # skip hidden web repository
 
            if repo._get_hidden():
 
                return
 
        else:
 
            repo = backend(repo_full_path, create=False)
 

	
 
        return repo
 

	
 

	
 
class RepoGroup(Base, BaseModel):
 
@@ -1979,248 +1979,248 @@ class PullRequest(Base, BaseModel):
 

	
 
    STATUS_NEW = u'new'
 
    STATUS_OPEN = u'open'
 
    STATUS_CLOSED = u'closed'
 

	
 
    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
 
    title = Column('title', Unicode(256), nullable=True)
 
    description = Column('description', UnicodeText(10240), nullable=True)
 
    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
 
    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
 
    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    org_ref = Column('org_ref', Unicode(256), nullable=False)
 
    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    other_ref = Column('other_ref', Unicode(256), nullable=False)
 

	
 
    @hybrid_property
 
    def revisions(self):
 
        return self._revisions.split(':')
 

	
 
    @revisions.setter
 
    def revisions(self, val):
 
        self._revisions = ':'.join(val)
 

	
 
    @property
 
    def org_ref_parts(self):
 
        return self.org_ref.split(':')
 

	
 
    @property
 
    def other_ref_parts(self):
 
        return self.other_ref.split(':')
 

	
 
    author = relationship('User', lazy='joined')
 
    reviewers = relationship('PullRequestReviewers',
 
                             cascade="all, delete, delete-orphan")
 
    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
 
    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
 
    statuses = relationship('ChangesetStatus')
 
    comments = relationship('ChangesetComment',
 
                             cascade="all, delete, delete-orphan")
 

	
 
    def is_closed(self):
 
        return self.status == self.STATUS_CLOSED
 

	
 
    @property
 
    def last_review_status(self):
 
        return self.statuses[-1].status if self.statuses else ''
 

	
 
    def __json__(self):
 
        return dict(
 
          revisions=self.revisions
 
        )
 

	
 

	
 
class PullRequestReviewers(Base, BaseModel):
 
    __tablename__ = 'pull_request_reviewers'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 

	
 
    def __init__(self, user=None, pull_request=None):
 
        self.user = user
 
        self.pull_request = pull_request
 

	
 
    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
 
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
 

	
 
    user = relationship('User')
 
    pull_request = relationship('PullRequest')
 

	
 

	
 
class Notification(Base, BaseModel):
 
    __tablename__ = 'notifications'
 
    __table_args__ = (
 
        Index('notification_type_idx', 'type'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 

	
 
    TYPE_CHANGESET_COMMENT = u'cs_comment'
 
    TYPE_MESSAGE = u'message'
 
    TYPE_MENTION = u'mention'
 
    TYPE_REGISTRATION = u'registration'
 
    TYPE_PULL_REQUEST = u'pull_request'
 
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
 

	
 
    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
 
    subject = Column('subject', Unicode(512), nullable=True)
 
    body = Column('body', UnicodeText(50000), nullable=True)
 
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    type_ = Column('type', Unicode(256))
 

	
 
    created_by_user = relationship('User')
 
    notifications_to_users = relationship('UserNotification', lazy='joined',
 
                                          cascade="all, delete, delete-orphan")
 

	
 
    @property
 
    def recipients(self):
 
        return [x.user for x in UserNotification.query()\
 
                .filter(UserNotification.notification == self)\
 
                .order_by(UserNotification.user_id.asc()).all()]
 

	
 
    @classmethod
 
    def create(cls, created_by, subject, body, recipients, type_=None):
 
        if type_ is None:
 
            type_ = Notification.TYPE_MESSAGE
 

	
 
        notification = cls()
 
        notification.created_by_user = created_by
 
        notification.subject = subject
 
        notification.body = body
 
        notification.type_ = type_
 
        notification.created_on = datetime.datetime.now()
 

	
 
        for u in recipients:
 
            assoc = UserNotification()
 
            assoc.notification = notification
 
            u.notifications.append(assoc)
 
        Session().add(notification)
 
        return notification
 

	
 
    @property
 
    def description(self):
 
        from kallithea.model.notification import NotificationModel
 
        return NotificationModel().make_description(self)
 

	
 

	
 
class UserNotification(Base, BaseModel):
 
    __tablename__ = 'user_to_notification'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'notification_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
 
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
 
    read = Column('read', Boolean, default=False)
 
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
 

	
 
    user = relationship('User', lazy="joined")
 
    notification = relationship('Notification', lazy="joined",
 
                                order_by=lambda: Notification.created_on.desc(),)
 

	
 
    def mark_as_read(self):
 
        self.read = True
 
        Session().add(self)
 

	
 

	
 
class Gist(Base, BaseModel):
 
    __tablename__ = 'gists'
 
    __table_args__ = (
 
        Index('g_gist_access_id_idx', 'gist_access_id'),
 
        Index('g_created_on_idx', 'created_on'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    GIST_PUBLIC = u'public'
 
    GIST_PRIVATE = u'private'
 

	
 
    gist_id = Column('gist_id', Integer(), primary_key=True)
 
    gist_access_id = Column('gist_access_id', Unicode(250))
 
    gist_description = Column('gist_description', UnicodeText(1024))
 
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
 
    gist_expires = Column('gist_expires', Float(), nullable=False)
 
    gist_type = Column('gist_type', Unicode(128), nullable=False)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    owner = relationship('User')
 

	
 
    @classmethod
 
    def get_or_404(cls, id_):
 
        res = cls.query().filter(cls.gist_access_id == id_).scalar()
 
        if not res:
 
            raise HTTPNotFound
 
        return res
 

	
 
    @classmethod
 
    def get_by_access_id(cls, gist_access_id):
 
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
 

	
 
    def gist_url(self):
 
        import kallithea
 
        alias_url = kallithea.CONFIG.get('gist_alias_url')
 
        if alias_url:
 
            return alias_url.replace('{gistid}', self.gist_access_id)
 

	
 
        from pylons import url
 
        return url('gist', gist_id=self.gist_access_id, qualified=True)
 
        import kallithea.lib.helpers as h
 
        return h.canonical_url('gist', gist_id=self.gist_access_id)
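
(Illustration, not part of the changeset: how gist_url() resolves above. A
configured gist_alias_url wins, otherwise the canonical URL helper is used;
the alias pattern and gist id below are invented.)

alias_url = 'https://gist.example.com/{gistid}'   # e.g. kallithea.CONFIG.get('gist_alias_url')
gist_access_id = '7'
if alias_url:
    gist_url = alias_url.replace('{gistid}', gist_access_id)
# gist_url == 'https://gist.example.com/7'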
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns base path where all gists are stored
 

	
 
        :param cls:
 
        """
 
        from kallithea.model.gist import GIST_STORE_LOC
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == URL_SEP)
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating gist related data for API
 
        """
 
        gist = self
 
        data = dict(
 
            gist_id=gist.gist_id,
 
            type=gist.gist_type,
 
            access_id=gist.gist_access_id,
 
            description=gist.gist_description,
 
            url=gist.gist_url(),
 
            expires=gist.gist_expires,
 
            created_on=gist.created_on,
 
        )
 
        return data
 

	
 
    def __json__(self):
 
        data = dict(
 
        )
 
        data.update(self.get_api_data())
 
        return data
 
    ## SCM functions
 

	
 
    @property
 
    def scm_instance(self):
 
        from kallithea.lib.vcs import get_repo
 
        base_path = self.base_path()
 
        return get_repo(os.path.join(*map(safe_str,
 
                                          [base_path, self.gist_access_id])))
 

	
 

	
 
class DbMigrateVersion(Base, BaseModel):
 
    __tablename__ = 'db_migrate_version'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    repository_id = Column('repository_id', String(250), primary_key=True)
 
    repository_path = Column('repository_path', Text)
 
    version = Column('version', Integer)
kallithea/lib/dbmigrate/schema/db_1_8_0.py
Show inline comments
 
@@ -878,388 +878,388 @@ class Repository(Base, BaseModel):
 
        database using URL_SEP
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        return cls.url_sep().join(repo_name.split(os.sep))
 

	
 
    @classmethod
 
    def get_by_repo_name(cls, repo_name):
 
        q = Session().query(cls).filter(cls.repo_name == repo_name)
 
        q = q.options(joinedload(Repository.fork))\
 
                .options(joinedload(Repository.user))\
 
                .options(joinedload(Repository.group))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_full_path(cls, repo_full_path):
 
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
 
        repo_name = cls.normalize_repo_name(repo_name)
 
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
 

	
 
    @classmethod
 
    def get_repo_forks(cls, repo_id):
 
        return cls.query().filter(Repository.fork_id == repo_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns base path where all repos are stored
 

	
 
        :param cls:
 
        """
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == cls.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def forks(self):
 
        """
 
        Return forks of this repo
 
        """
 
        return Repository.get_repo_forks(self.repo_id)
 

	
 
    @property
 
    def parent(self):
 
        """
 
        Returns fork parent
 
        """
 
        return self.fork
 

	
 
    @property
 
    def just_name(self):
 
        return self.repo_name.split(Repository.url_sep())[-1]
 

	
 
    @property
 
    def groups_with_parents(self):
 
        groups = []
 
        if self.group is None:
 
            return groups
 

	
 
        cur_gr = self.group
 
        groups.insert(0, cur_gr)
 
        while 1:
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            groups.insert(0, gr)
 

	
 
        return groups
 

	
 
    @property
 
    def groups_and_repo(self):
 
        return self.groups_with_parents, self.just_name, self.repo_name
 

	
 
    @LazyProperty
 
    def repo_path(self):
 
        """
 
        Returns the full base path of this repository, i.e. where it actually
 
        exists on a filesystem
 
        """
 
        q = Session().query(Ui).filter(Ui.ui_key ==
 
                                              Repository.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def repo_full_path(self):
 
        p = [self.repo_path]
 
        # we need to split the name by / since this is how we store the
 
        # names in the database, but that eventually needs to be converted
 
        # into a valid system path
 
        p += self.repo_name.split(Repository.url_sep())
 
        return os.path.join(*map(safe_unicode, p))
 

	
 
    @property
 
    def cache_keys(self):
 
        """
 
        Returns associated cache keys for that repo
 
        """
 
        return CacheInvalidation.query()\
 
            .filter(CacheInvalidation.cache_args == self.repo_name)\
 
            .order_by(CacheInvalidation.cache_key)\
 
            .all()
 

	
 
    def get_new_name(self, repo_name):
 
        """
 
        returns new full repository name based on assigned group and new repo_name
 

	
 
        :param repo_name:
 
        """
 
        path_prefix = self.group.full_path_splitted if self.group else []
 
        return Repository.url_sep().join(path_prefix + [repo_name])
 

	
 
    @property
 
    def _ui(self):
 
        """
 
        Creates a db-based ui object for this repository
 
        """
 
        from kallithea.lib.utils import make_ui
 
        return make_ui('db', clear_session=False)
 

	
 
    @classmethod
 
    def is_valid(cls, repo_name):
 
        """
 
        returns True if given repo name is a valid filesystem repository
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        from kallithea.lib.utils import is_valid_repo
 

	
 
        return is_valid_repo(repo_name, cls.base_path())
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating repo api data
 

	
 
        """
 
        repo = self
 
        data = dict(
 
            repo_id=repo.repo_id,
 
            repo_name=repo.repo_name,
 
            repo_type=repo.repo_type,
 
            clone_uri=repo.clone_uri,
 
            private=repo.private,
 
            created_on=repo.created_on,
 
            description=repo.description,
 
            landing_rev=repo.landing_rev,
 
            owner=repo.user.username,
 
            fork_of=repo.fork.repo_name if repo.fork else None,
 
            enable_statistics=repo.enable_statistics,
 
            enable_locking=repo.enable_locking,
 
            enable_downloads=repo.enable_downloads,
 
            last_changeset=repo.changeset_cache,
 
            locked_by=User.get(self.locked[0]).get_api_data() \
 
                if self.locked[0] else None,
 
            locked_date=time_to_datetime(self.locked[1]) \
 
                if self.locked[1] else None
 
        )
 
        rc_config = Setting.get_app_settings()
 
        repository_fields = str2bool(rc_config.get('repository_fields'))
 
        if repository_fields:
 
            for f in self.extra_fields:
 
                data[f.field_key_prefixed] = f.field_value
 

	
 
        return data
 

	
 
    @classmethod
 
    def lock(cls, repo, user_id, lock_time=None):
 
        if not lock_time:
 
            lock_time = time.time()
 
        repo.locked = [user_id, lock_time]
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def unlock(cls, repo):
 
        repo.locked = None
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def getlock(cls, repo):
 
        return repo.locked
 

	
 
    @property
 
    def last_db_change(self):
 
        return self.updated_on
 

	
 
    def clone_url(self, **override):
 
        from pylons import url
 
        import kallithea.lib.helpers as h
 
        from urlparse import urlparse
 
        import urllib
 
        parsed_url = urlparse(url('home', qualified=True))
 
        parsed_url = urlparse(h.canonical_url('home'))
 
        default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
 
        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
 
        args = {
 
           'user': '',
 
           'pass': '',
 
           'scheme': parsed_url.scheme,
 
           'netloc': parsed_url.netloc,
 
           'prefix': decoded_path,
 
           'path': self.repo_name
 
        }
 

	
 
        args.update(override)
 
        return default_clone_uri % args
 

	
 
    #==========================================================================
 
    # SCM PROPERTIES
 
    #==========================================================================
 

	
 
    def get_changeset(self, rev=None):
 
        return get_changeset_safe(self.scm_instance, rev)
 

	
 
    def get_landing_changeset(self):
 
        """
 
        Returns landing changeset, or if that doesn't exist returns the tip
 
        """
 
        cs = self.get_changeset(self.landing_rev) or self.get_changeset()
 
        return cs
 

	
 
    def update_changeset_cache(self, cs_cache=None):
 
        """
 
        Update cache of last changeset for repository, keys should be::
 

	
 
            short_id
 
            raw_id
 
            revision
 
            message
 
            date
 
            author
 

	
 
        :param cs_cache:
 
        """
 
        from kallithea.lib.vcs.backends.base import BaseChangeset
 
        if cs_cache is None:
 
            cs_cache = EmptyChangeset()
 
            # use no-cache version here
 
            scm_repo = self.scm_instance_no_cache()
 
            if scm_repo:
 
                cs_cache = scm_repo.get_changeset()
 

	
 
        if isinstance(cs_cache, BaseChangeset):
 
            cs_cache = cs_cache.__json__()
 

	
 
        if (cs_cache != self.changeset_cache or not self.changeset_cache):
 
            _default = datetime.datetime.fromtimestamp(0)
 
            last_change = cs_cache.get('date') or _default
 
            log.debug('updated repo %s with new cs cache %s'
 
                      % (self.repo_name, cs_cache))
 
            self.updated_on = last_change
 
            self.changeset_cache = cs_cache
 
            Session().add(self)
 
            Session().commit()
 
        else:
 
            log.debug('Skipping repo:%s already with latest changes'
 
                      % self.repo_name)
 

	
 
    @property
 
    def tip(self):
 
        return self.get_changeset('tip')
 

	
 
    @property
 
    def author(self):
 
        return self.tip.author
 

	
 
    @property
 
    def last_change(self):
 
        return self.scm_instance.last_change
 

	
 
    def get_comments(self, revisions=None):
 
        """
 
        Returns comments for this repository grouped by revisions
 

	
 
        :param revisions: filter query by revisions only
 
        """
 
        cmts = ChangesetComment.query()\
 
            .filter(ChangesetComment.repo == self)
 
        if revisions:
 
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
 
        grouped = collections.defaultdict(list)
 
        for cmt in cmts.all():
 
            grouped[cmt.revision].append(cmt)
 
        return grouped
 

	
 
    def statuses(self, revisions=None):
 
        """
 
        Returns statuses for this repository
 

	
 
        :param revisions: list of revisions to get statuses for
 
        """
 

	
 
        statuses = ChangesetStatus.query()\
 
            .filter(ChangesetStatus.repo == self)\
 
            .filter(ChangesetStatus.version == 0)
 
        if revisions:
 
            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
 
        grouped = {}
 

	
 
        # maybe we have an open pull request without a status yet?
 
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
 
        status_lbl = ChangesetStatus.get_status_lbl(stat)
 
        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
 
            for rev in pr.revisions:
 
                pr_id = pr.pull_request_id
 
                pr_repo = pr.other_repo.repo_name
 
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
 

	
 
        for stat in statuses.all():
 
            pr_id = pr_repo = None
 
            if stat.pull_request:
 
                pr_id = stat.pull_request.pull_request_id
 
                pr_repo = stat.pull_request.other_repo.repo_name
 
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
 
                                      pr_id, pr_repo]
 
        return grouped
 

	
 
    def _repo_size(self):
 
        from kallithea.lib import helpers as h
 
        log.debug('calculating repository size...')
 
        return h.format_byte_size(self.scm_instance.size)
 

	
 
    #==========================================================================
 
    # SCM CACHE INSTANCE
 
    #==========================================================================
 

	
 
    def set_invalidate(self):
 
        """
 
        Mark caches of this repo as invalid.
 
        """
 
        CacheInvalidation.set_invalidate(self.repo_name)
 

	
 
    def scm_instance_no_cache(self):
 
        return self.__get_instance()
 

	
 
    @property
 
    def scm_instance(self):
 
        import kallithea
 
        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
 
        if full_cache:
 
            return self.scm_instance_cached()
 
        return self.__get_instance()
 

	
 
    def scm_instance_cached(self, valid_cache_keys=None):
 
        @cache_region('long_term')
 
        def _c(repo_name):
 
            return self.__get_instance()
 
        rn = self.repo_name
 

	
 
        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
 
        if not valid:
 
            log.debug('Cache for %s invalidated, getting new object' % (rn))
 
            region_invalidate(_c, None, rn)
 
        else:
 
            log.debug('Getting obj for %s from cache' % (rn))
 
        return _c(rn)
 

	
 
    def __get_instance(self):
 
        repo_full_path = self.repo_full_path
 
        try:
 
            alias = get_scm(repo_full_path)[0]
 
            log.debug('Creating instance of %s repository from %s'
 
                      % (alias, repo_full_path))
 
            backend = get_backend(alias)
 
        except VCSError:
 
            log.error(traceback.format_exc())
 
            log.error('Perhaps this repository is in the db but not on the '
 
                      'filesystem; run "rescan repositories" with the '
 
                      '"destroy old data" option from the admin panel')
 
            return
 

	
 
        if alias == 'hg':
 

	
 
            repo = backend(safe_str(repo_full_path), create=False,
 
                           baseui=self._ui)
 
            # skip hidden web repository
 
            if repo._get_hidden():
 
                return
 
        else:
 
            repo = backend(repo_full_path, create=False)
 

	
 
        return repo
 

	
 

	
 
class RepoGroup(Base, BaseModel):
 
@@ -2026,248 +2026,248 @@ class PullRequest(Base, BaseModel):
 
    # values for .status
 
    STATUS_NEW = u'new'
 
    STATUS_OPEN = u'open'
 
    STATUS_CLOSED = u'closed'
 

	
 
    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
 
    title = Column('title', Unicode(256), nullable=True)
 
    description = Column('description', UnicodeText(10240), nullable=True)
 
    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW) # only for closedness, not approve/reject/etc
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
 
    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
 
    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    org_ref = Column('org_ref', Unicode(256), nullable=False)
 
    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    other_ref = Column('other_ref', Unicode(256), nullable=False)
 

	
 
    @hybrid_property
 
    def revisions(self):
 
        return self._revisions.split(':')
 

	
 
    @revisions.setter
 
    def revisions(self, val):
 
        self._revisions = ':'.join(val)
 

	
 
    @property
 
    def org_ref_parts(self):
 
        return self.org_ref.split(':')
 

	
 
    @property
 
    def other_ref_parts(self):
 
        return self.other_ref.split(':')
 

	
 
    author = relationship('User', lazy='joined')
 
    reviewers = relationship('PullRequestReviewers',
 
                             cascade="all, delete, delete-orphan")
 
    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
 
    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
 
    statuses = relationship('ChangesetStatus')
 
    comments = relationship('ChangesetComment',
 
                             cascade="all, delete, delete-orphan")
 

	
 
    def is_closed(self):
 
        return self.status == self.STATUS_CLOSED
 

	
 
    @property
 
    def last_review_status(self):
 
        return self.statuses[-1].status if self.statuses else ''
 

	
 
    def __json__(self):
 
        return dict(
 
            revisions=self.revisions
 
        )
 

	
 

	
 
class PullRequestReviewers(Base, BaseModel):
 
    __tablename__ = 'pull_request_reviewers'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    def __init__(self, user=None, pull_request=None):
 
        self.user = user
 
        self.pull_request = pull_request
 

	
 
    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
 
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
 

	
 
    user = relationship('User')
 
    pull_request = relationship('PullRequest')
 

	
 

	
 
class Notification(Base, BaseModel):
 
    __tablename__ = 'notifications'
 
    __table_args__ = (
 
        Index('notification_type_idx', 'type'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    TYPE_CHANGESET_COMMENT = u'cs_comment'
 
    TYPE_MESSAGE = u'message'
 
    TYPE_MENTION = u'mention'
 
    TYPE_REGISTRATION = u'registration'
 
    TYPE_PULL_REQUEST = u'pull_request'
 
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
 

	
 
    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
 
    subject = Column('subject', Unicode(512), nullable=True)
 
    body = Column('body', UnicodeText(50000), nullable=True)
 
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    type_ = Column('type', Unicode(256))
 

	
 
    created_by_user = relationship('User')
 
    notifications_to_users = relationship('UserNotification', lazy='joined',
 
                                          cascade="all, delete, delete-orphan")
 

	
 
    @property
 
    def recipients(self):
 
        return [x.user for x in UserNotification.query()\
 
                .filter(UserNotification.notification == self)\
 
                .order_by(UserNotification.user_id.asc()).all()]
 

	
 
    @classmethod
 
    def create(cls, created_by, subject, body, recipients, type_=None):
 
        if type_ is None:
 
            type_ = Notification.TYPE_MESSAGE
 

	
 
        notification = cls()
 
        notification.created_by_user = created_by
 
        notification.subject = subject
 
        notification.body = body
 
        notification.type_ = type_
 
        notification.created_on = datetime.datetime.now()
 

	
 
        for u in recipients:
 
            assoc = UserNotification()
 
            assoc.notification = notification
 
            u.notifications.append(assoc)
 
        Session().add(notification)
 
        return notification
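A possible calling pattern for the factory above; the user objects are stand-ins and committing the session is left to the caller:

    # hypothetical caller: 'admin' and 'reviewer' stand in for real User rows
    notification = Notification.create(
        created_by=admin,
        subject='pull request updated',
        body='new revisions were added',
        recipients=[reviewer],
        type_=Notification.TYPE_PULL_REQUEST,
    )
    Session().commit()  # create() only add()s the notification; the caller commits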
 

	
 
    @property
 
    def description(self):
 
        from kallithea.model.notification import NotificationModel
 
        return NotificationModel().make_description(self)
 

	
 

	
 
class UserNotification(Base, BaseModel):
 
    __tablename__ = 'user_to_notification'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'notification_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
 
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
 
    read = Column('read', Boolean, default=False)
 
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
 

	
 
    user = relationship('User', lazy="joined")
 
    notification = relationship('Notification', lazy="joined",
 
                                order_by=lambda: Notification.created_on.desc(),)
 

	
 
    def mark_as_read(self):
 
        self.read = True
 
        Session().add(self)
 

	
 

	
 
class Gist(Base, BaseModel):
 
    __tablename__ = 'gists'
 
    __table_args__ = (
 
        Index('g_gist_access_id_idx', 'gist_access_id'),
 
        Index('g_created_on_idx', 'created_on'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    GIST_PUBLIC = u'public'
 
    GIST_PRIVATE = u'private'
 

	
 
    gist_id = Column('gist_id', Integer(), primary_key=True)
 
    gist_access_id = Column('gist_access_id', Unicode(250))
 
    gist_description = Column('gist_description', UnicodeText(1024))
 
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
 
    gist_expires = Column('gist_expires', Float(53), nullable=False)
 
    gist_type = Column('gist_type', Unicode(128), nullable=False)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    owner = relationship('User')
 

	
 
    @classmethod
 
    def get_or_404(cls, id_):
 
        res = cls.query().filter(cls.gist_access_id == id_).scalar()
 
        if not res:
 
            raise HTTPNotFound
 
        return res
 

	
 
    @classmethod
 
    def get_by_access_id(cls, gist_access_id):
 
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
 

	
 
    def gist_url(self):
 
        import kallithea
 
        alias_url = kallithea.CONFIG.get('gist_alias_url')
 
        if alias_url:
 
            return alias_url.replace('{gistid}', self.gist_access_id)
 

	
 
        from pylons import url
 
        return url('gist', gist_id=self.gist_access_id, qualified=True)
 
        import kallithea.lib.helpers as h
 
        return h.canonical_url('gist', gist_id=self.gist_access_id)
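The hunk above replaces the request-qualified url() call with the canonical_url helper. A rough illustrative sketch of such a helper, assuming a canonical_url config value like 'https://code.example.com' (the names and fallback behaviour here are assumptions, not the exact helpers code):

    # illustrative sketch only -- not the actual kallithea.lib.helpers module
    from pylons import url
    import kallithea

    def canonical_url_sketch(*args, **kwargs):
        try:
            parts = kallithea.CONFIG.get('canonical_url', '').split('://', 1)
            kwargs['host'] = parts[1]        # e.g. 'code.example.com'
            kwargs['protocol'] = parts[0]    # e.g. 'https'
        except IndexError:
            kwargs['qualified'] = True       # no setting: fall back to the request URL
        return url(*args, **kwargs)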
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns the base path where all gists are stored
 

	
 
        :param cls:
 
        """
 
        from kallithea.model.gist import GIST_STORE_LOC
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == URL_SEP)
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating gist related data for API
 
        """
 
        gist = self
 
        data = dict(
 
            gist_id=gist.gist_id,
 
            type=gist.gist_type,
 
            access_id=gist.gist_access_id,
 
            description=gist.gist_description,
 
            url=gist.gist_url(),
 
            expires=gist.gist_expires,
 
            created_on=gist.created_on,
 
        )
 
        return data
 

	
 
    def __json__(self):
 
        data = dict(
 
        )
 
        data.update(self.get_api_data())
 
        return data
 
    ## SCM functions
 

	
 
    @property
 
    def scm_instance(self):
 
        from kallithea.lib.vcs import get_repo
 
        base_path = self.base_path()
 
        return get_repo(os.path.join(*map(safe_str,
 
                                          [base_path, self.gist_access_id])))
 

	
 

	
 
class DbMigrateVersion(Base, BaseModel):
 
    __tablename__ = 'db_migrate_version'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    repository_id = Column('repository_id', String(250), primary_key=True)
 
    repository_path = Column('repository_path', Text)
 
    version = Column('version', Integer)
kallithea/lib/dbmigrate/schema/db_2_0_0.py
Show inline comments
 
@@ -935,388 +935,388 @@ class Repository(Base, BaseModel):
 
        database using URL_SEP
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        return cls.url_sep().join(repo_name.split(os.sep))
 

	
 
    @classmethod
 
    def get_by_repo_name(cls, repo_name):
 
        q = Session().query(cls).filter(cls.repo_name == repo_name)
 
        q = q.options(joinedload(Repository.fork))\
 
                .options(joinedload(Repository.user))\
 
                .options(joinedload(Repository.group))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_full_path(cls, repo_full_path):
 
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
 
        repo_name = cls.normalize_repo_name(repo_name)
 
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
 

	
 
    @classmethod
 
    def get_repo_forks(cls, repo_id):
 
        return cls.query().filter(Repository.fork_id == repo_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns the base path where all repos are stored
 

	
 
        :param cls:
 
        """
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == cls.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def forks(self):
 
        """
 
        Return forks of this repo
 
        """
 
        return Repository.get_repo_forks(self.repo_id)
 

	
 
    @property
 
    def parent(self):
 
        """
 
        Returns fork parent
 
        """
 
        return self.fork
 

	
 
    @property
 
    def just_name(self):
 
        return self.repo_name.split(Repository.url_sep())[-1]
 

	
 
    @property
 
    def groups_with_parents(self):
 
        groups = []
 
        if self.group is None:
 
            return groups
 

	
 
        cur_gr = self.group
 
        groups.insert(0, cur_gr)
 
        while 1:
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            groups.insert(0, gr)
 

	
 
        return groups
 

	
 
    @property
 
    def groups_and_repo(self):
 
        return self.groups_with_parents, self.just_name, self.repo_name
 

	
 
    @LazyProperty
 
    def repo_path(self):
 
        """
 
        Returns the full base path for this repository, i.e. where it actually
 
        exists on a filesystem
 
        """
 
        q = Session().query(Ui).filter(Ui.ui_key ==
 
                                              Repository.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def repo_full_path(self):
 
        p = [self.repo_path]
 
        # we need to split the name by / since this is how we store the
 
        # names in the database, but that eventually needs to be converted
 
        # into a valid system path
 
        p += self.repo_name.split(Repository.url_sep())
 
        return os.path.join(*map(safe_unicode, p))
 

	
 
    @property
 
    def cache_keys(self):
 
        """
 
        Returns associated cache keys for that repo
 
        """
 
        return CacheInvalidation.query()\
 
            .filter(CacheInvalidation.cache_args == self.repo_name)\
 
            .order_by(CacheInvalidation.cache_key)\
 
            .all()
 

	
 
    def get_new_name(self, repo_name):
 
        """
 
        returns the new full repository name based on the assigned group and new name
 

	
 
        :param group_name:
 
        """
 
        path_prefix = self.group.full_path_splitted if self.group else []
 
        return Repository.url_sep().join(path_prefix + [repo_name])
 

	
 
    @property
 
    def _ui(self):
 
        """
 
        Creates a db-based ui object for this repository
 
        """
 
        from kallithea.lib.utils import make_ui
 
        return make_ui('db', clear_session=False)
 

	
 
    @classmethod
 
    def is_valid(cls, repo_name):
 
        """
 
        returns True if given repo name is a valid filesystem repository
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        from kallithea.lib.utils import is_valid_repo
 

	
 
        return is_valid_repo(repo_name, cls.base_path())
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating repo api data
 

	
 
        """
 
        repo = self
 
        data = dict(
 
            repo_id=repo.repo_id,
 
            repo_name=repo.repo_name,
 
            repo_type=repo.repo_type,
 
            clone_uri=repo.clone_uri,
 
            private=repo.private,
 
            created_on=repo.created_on,
 
            description=repo.description,
 
            landing_rev=repo.landing_rev,
 
            owner=repo.user.username,
 
            fork_of=repo.fork.repo_name if repo.fork else None,
 
            enable_statistics=repo.enable_statistics,
 
            enable_locking=repo.enable_locking,
 
            enable_downloads=repo.enable_downloads,
 
            last_changeset=repo.changeset_cache,
 
            locked_by=User.get(self.locked[0]).get_api_data() \
 
                if self.locked[0] else None,
 
            locked_date=time_to_datetime(self.locked[1]) \
 
                if self.locked[1] else None
 
        )
 
        rc_config = Setting.get_app_settings()
 
        repository_fields = str2bool(rc_config.get('repository_fields'))
 
        if repository_fields:
 
            for f in self.extra_fields:
 
                data[f.field_key_prefixed] = f.field_value
 

	
 
        return data
 

	
 
    @classmethod
 
    def lock(cls, repo, user_id, lock_time=None):
 
        if not lock_time:
 
            lock_time = time.time()
 
        repo.locked = [user_id, lock_time]
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def unlock(cls, repo):
 
        repo.locked = None
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def getlock(cls, repo):
 
        return repo.locked
 

	
 
    @property
 
    def last_db_change(self):
 
        return self.updated_on
 

	
 
    def clone_url(self, **override):
 
        from pylons import url
 
        import kallithea.lib.helpers as h
 
        from urlparse import urlparse
 
        import urllib
 
        parsed_url = urlparse(url('home', qualified=True))
 
        parsed_url = urlparse(h.canonical_url('home'))
 
        default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
 
        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
 
        args = {
 
           'user': '',
 
           'pass': '',
 
           'scheme': parsed_url.scheme,
 
           'netloc': parsed_url.netloc,
 
           'prefix': decoded_path,
 
           'path': self.repo_name
 
        }
 

	
 
        args.update(override)
 
        return default_clone_uri % args
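For reference, a worked expansion of the clone URI template above with invented values; with no override the user and pass fields stay empty, so any '@' separator has to be carried in the values themselves:

    # hypothetical values: root-mounted install on code.example.com, repo 'group/repo'
    args = {'scheme': 'https', 'user': 'jane', 'pass': ':secret@',
            'netloc': 'code.example.com', 'prefix': '/', 'path': 'group/repo'}
    print('%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s' % args)
    # -> https://jane:secret@code.example.com/group/repo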
 

	
 
    #==========================================================================
 
    # SCM PROPERTIES
 
    #==========================================================================
 

	
 
    def get_changeset(self, rev=None):
 
        return get_changeset_safe(self.scm_instance, rev)
 

	
 
    def get_landing_changeset(self):
 
        """
 
        Returns landing changeset, or if that doesn't exist returns the tip
 
        """
 
        cs = self.get_changeset(self.landing_rev) or self.get_changeset()
 
        return cs
 

	
 
    def update_changeset_cache(self, cs_cache=None):
 
        """
 
        Update cache of last changeset for repository, keys should be::
 

	
 
            short_id
 
            raw_id
 
            revision
 
            message
 
            date
 
            author
 

	
 
        :param cs_cache:
 
        """
 
        from kallithea.lib.vcs.backends.base import BaseChangeset
 
        if cs_cache is None:
 
            cs_cache = EmptyChangeset()
 
            # use no-cache version here
 
            scm_repo = self.scm_instance_no_cache()
 
            if scm_repo:
 
                cs_cache = scm_repo.get_changeset()
 

	
 
        if isinstance(cs_cache, BaseChangeset):
 
            cs_cache = cs_cache.__json__()
 

	
 
        if (cs_cache != self.changeset_cache or not self.changeset_cache):
 
            _default = datetime.datetime.fromtimestamp(0)
 
            last_change = cs_cache.get('date') or _default
 
            log.debug('updated repo %s with new cs cache %s'
 
                      % (self.repo_name, cs_cache))
 
            self.updated_on = last_change
 
            self.changeset_cache = cs_cache
 
            Session().add(self)
 
            Session().commit()
 
        else:
 
            log.debug('Skipping repo:%s already with latest changes'
 
                      % self.repo_name)
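For reference, a sketch of the dict that ends up in changeset_cache; every value below is invented:

    import datetime
    cs_cache = {
        'short_id': '1e4b0c3f8a2d',
        'raw_id': '1e4b0c3f8a2d9b7e6f5a4c3d2e1f0a9b8c7d6e5f',
        'revision': 42,
        'message': 'fix feed pagination',
        'date': datetime.datetime(2014, 7, 1, 12, 0),
        'author': 'Jane Doe <jane@example.com>',
    }
    # updated_on is then set from cs_cache['date'], or the epoch for an empty repo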
 

	
 
    @property
 
    def tip(self):
 
        return self.get_changeset('tip')
 

	
 
    @property
 
    def author(self):
 
        return self.tip.author
 

	
 
    @property
 
    def last_change(self):
 
        return self.scm_instance.last_change
 

	
 
    def get_comments(self, revisions=None):
 
        """
 
        Returns comments for this repository grouped by revisions
 

	
 
        :param revisions: filter query by revisions only
 
        """
 
        cmts = ChangesetComment.query()\
 
            .filter(ChangesetComment.repo == self)
 
        if revisions:
 
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
 
        grouped = collections.defaultdict(list)
 
        for cmt in cmts.all():
 
            grouped[cmt.revision].append(cmt)
 
        return grouped
 

	
 
    def statuses(self, revisions=None):
 
        """
 
        Returns statuses for this repository
 

	
 
        :param revisions: list of revisions to get statuses for
 
        """
 

	
 
        statuses = ChangesetStatus.query()\
 
            .filter(ChangesetStatus.repo == self)\
 
            .filter(ChangesetStatus.version == 0)
 
        if revisions:
 
            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
 
        grouped = {}
 

	
 
        # maybe we have an open new pull request without a status?
 
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
 
        status_lbl = ChangesetStatus.get_status_lbl(stat)
 
        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
 
            for rev in pr.revisions:
 
                pr_id = pr.pull_request_id
 
                pr_repo = pr.other_repo.repo_name
 
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
 

	
 
        for stat in statuses.all():
 
            pr_id = pr_repo = None
 
            if stat.pull_request:
 
                pr_id = stat.pull_request.pull_request_id
 
                pr_repo = stat.pull_request.other_repo.repo_name
 
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
 
                                      pr_id, pr_repo]
 
        return grouped
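The result maps each revision to [status, status label, pull request id, other repo name]; a sketch of the shape with invented values:

    # invented: one revision still pending via pull request 7, one with an explicit status
    grouped = {
        'a1b2c3d4e5f6': ['under_review', 'Under Review', 7, 'group/other-repo'],
        'f6e5d4c3b2a1': ['approved', 'Approved', None, None],
    }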
 

	
 
    def _repo_size(self):
 
        from kallithea.lib import helpers as h
 
        log.debug('calculating repository size...')
 
        return h.format_byte_size(self.scm_instance.size)
 

	
 
    #==========================================================================
 
    # SCM CACHE INSTANCE
 
    #==========================================================================
 

	
 
    def set_invalidate(self):
 
        """
 
        Mark caches of this repo as invalid.
 
        """
 
        CacheInvalidation.set_invalidate(self.repo_name)
 

	
 
    def scm_instance_no_cache(self):
 
        return self.__get_instance()
 

	
 
    @property
 
    def scm_instance(self):
 
        import kallithea
 
        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
 
        if full_cache:
 
            return self.scm_instance_cached()
 
        return self.__get_instance()
 

	
 
    def scm_instance_cached(self, valid_cache_keys=None):
 
        @cache_region('long_term')
 
        def _c(repo_name):
 
            return self.__get_instance()
 
        rn = self.repo_name
 

	
 
        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
 
        if not valid:
 
            log.debug('Cache for %s invalidated, getting new object' % (rn))
 
            region_invalidate(_c, None, rn)
 
        else:
 
            log.debug('Getting obj for %s from cache' % (rn))
 
        return _c(rn)
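The cache wrapper above is plain beaker: a region-decorated closure keyed by repository name, invalidated when the CacheInvalidation check fails. A minimal self-contained sketch of the same pattern (build_instance is a stand-in, and a 'long_term' region is assumed to be configured in the ini file):

    from beaker.cache import cache_region, region_invalidate

    def build_instance(repo_name):
        # stand-in for the expensive backend construction done in __get_instance
        return object()

    @cache_region('long_term')
    def _cached_instance(repo_name):
        return build_instance(repo_name)

    def get_instance(repo_name, valid):
        if not valid:
            # drop the stale entry so the next call repopulates the region
            region_invalidate(_cached_instance, None, repo_name)
        return _cached_instance(repo_name)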
 

	
 
    def __get_instance(self):
 
        repo_full_path = self.repo_full_path
 
        try:
 
            alias = get_scm(repo_full_path)[0]
 
            log.debug('Creating instance of %s repository from %s'
 
                      % (alias, repo_full_path))
 
            backend = get_backend(alias)
 
        except VCSError:
 
            log.error(traceback.format_exc())
 
            log.error('Perhaps this repository is in db and not in '
 
                      'filesystem run rescan repositories with '
 
                      '"destroy old data " option from admin panel')
 
            return
 

	
 
        if alias == 'hg':
 

	
 
            repo = backend(safe_str(repo_full_path), create=False,
 
                           baseui=self._ui)
 
            # skip hidden web repository
 
            if repo._get_hidden():
 
                return
 
        else:
 
            repo = backend(repo_full_path, create=False)
 

	
 
        return repo
 

	
 

	
 
class RepoGroup(Base, BaseModel):
 
@@ -2086,248 +2086,248 @@ class PullRequest(Base, BaseModel):
 
    # values for .status
 
    STATUS_NEW = u'new'
 
    STATUS_OPEN = u'open'
 
    STATUS_CLOSED = u'closed'
 

	
 
    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
 
    title = Column('title', Unicode(256), nullable=True)
 
    description = Column('description', UnicodeText(10240), nullable=True)
 
    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW) # only for closedness, not approve/reject/etc
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
 
    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
 
    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    org_ref = Column('org_ref', Unicode(256), nullable=False)
 
    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    other_ref = Column('other_ref', Unicode(256), nullable=False)
 

	
 
    @hybrid_property
 
    def revisions(self):
 
        return self._revisions.split(':')
 

	
 
    @revisions.setter
 
    def revisions(self, val):
 
        self._revisions = ':'.join(val)
 

	
 
    @property
 
    def org_ref_parts(self):
 
        return self.org_ref.split(':')
 

	
 
    @property
 
    def other_ref_parts(self):
 
        return self.other_ref.split(':')
 

	
 
    author = relationship('User', lazy='joined')
 
    reviewers = relationship('PullRequestReviewers',
 
                             cascade="all, delete, delete-orphan")
 
    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
 
    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
 
    statuses = relationship('ChangesetStatus')
 
    comments = relationship('ChangesetComment',
 
                             cascade="all, delete, delete-orphan")
 

	
 
    def is_closed(self):
 
        return self.status == self.STATUS_CLOSED
 

	
 
    @property
 
    def last_review_status(self):
 
        return self.statuses[-1].status if self.statuses else ''
 

	
 
    def __json__(self):
 
        return dict(
 
            revisions=self.revisions
 
        )
 

	
 

	
 
class PullRequestReviewers(Base, BaseModel):
 
    __tablename__ = 'pull_request_reviewers'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    def __init__(self, user=None, pull_request=None):
 
        self.user = user
 
        self.pull_request = pull_request
 

	
 
    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
 
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
 

	
 
    user = relationship('User')
 
    pull_request = relationship('PullRequest')
 

	
 

	
 
class Notification(Base, BaseModel):
 
    __tablename__ = 'notifications'
 
    __table_args__ = (
 
        Index('notification_type_idx', 'type'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    TYPE_CHANGESET_COMMENT = u'cs_comment'
 
    TYPE_MESSAGE = u'message'
 
    TYPE_MENTION = u'mention'
 
    TYPE_REGISTRATION = u'registration'
 
    TYPE_PULL_REQUEST = u'pull_request'
 
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
 

	
 
    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
 
    subject = Column('subject', Unicode(512), nullable=True)
 
    body = Column('body', UnicodeText(50000), nullable=True)
 
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    type_ = Column('type', Unicode(256))
 

	
 
    created_by_user = relationship('User')
 
    notifications_to_users = relationship('UserNotification', lazy='joined',
 
                                          cascade="all, delete, delete-orphan")
 

	
 
    @property
 
    def recipients(self):
 
        return [x.user for x in UserNotification.query()\
 
                .filter(UserNotification.notification == self)\
 
                .order_by(UserNotification.user_id.asc()).all()]
 

	
 
    @classmethod
 
    def create(cls, created_by, subject, body, recipients, type_=None):
 
        if type_ is None:
 
            type_ = Notification.TYPE_MESSAGE
 

	
 
        notification = cls()
 
        notification.created_by_user = created_by
 
        notification.subject = subject
 
        notification.body = body
 
        notification.type_ = type_
 
        notification.created_on = datetime.datetime.now()
 

	
 
        for u in recipients:
 
            assoc = UserNotification()
 
            assoc.notification = notification
 
            u.notifications.append(assoc)
 
        Session().add(notification)
 
        return notification
 

	
 
    @property
 
    def description(self):
 
        from kallithea.model.notification import NotificationModel
 
        return NotificationModel().make_description(self)
 

	
 

	
 
class UserNotification(Base, BaseModel):
 
    __tablename__ = 'user_to_notification'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'notification_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
 
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
 
    read = Column('read', Boolean, default=False)
 
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
 

	
 
    user = relationship('User', lazy="joined")
 
    notification = relationship('Notification', lazy="joined",
 
                                order_by=lambda: Notification.created_on.desc(),)
 

	
 
    def mark_as_read(self):
 
        self.read = True
 
        Session().add(self)
 

	
 

	
 
class Gist(Base, BaseModel):
 
    __tablename__ = 'gists'
 
    __table_args__ = (
 
        Index('g_gist_access_id_idx', 'gist_access_id'),
 
        Index('g_created_on_idx', 'created_on'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    GIST_PUBLIC = u'public'
 
    GIST_PRIVATE = u'private'
 

	
 
    gist_id = Column('gist_id', Integer(), primary_key=True)
 
    gist_access_id = Column('gist_access_id', Unicode(250))
 
    gist_description = Column('gist_description', UnicodeText(1024))
 
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
 
    gist_expires = Column('gist_expires', Float(53), nullable=False)
 
    gist_type = Column('gist_type', Unicode(128), nullable=False)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    owner = relationship('User')
 

	
 
    @classmethod
 
    def get_or_404(cls, id_):
 
        res = cls.query().filter(cls.gist_access_id == id_).scalar()
 
        if not res:
 
            raise HTTPNotFound
 
        return res
 

	
 
    @classmethod
 
    def get_by_access_id(cls, gist_access_id):
 
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
 

	
 
    def gist_url(self):
 
        import kallithea
 
        alias_url = kallithea.CONFIG.get('gist_alias_url')
 
        if alias_url:
 
            return alias_url.replace('{gistid}', self.gist_access_id)
 

	
 
        from pylons import url
 
        return url('gist', gist_id=self.gist_access_id, qualified=True)
 
        import kallithea.lib.helpers as h
 
        return h.canonical_url('gist', gist_id=self.gist_access_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns the base path where all gists are stored
 

	
 
        :param cls:
 
        """
 
        from kallithea.model.gist import GIST_STORE_LOC
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == URL_SEP)
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating gist related data for API
 
        """
 
        gist = self
 
        data = dict(
 
            gist_id=gist.gist_id,
 
            type=gist.gist_type,
 
            access_id=gist.gist_access_id,
 
            description=gist.gist_description,
 
            url=gist.gist_url(),
 
            expires=gist.gist_expires,
 
            created_on=gist.created_on,
 
        )
 
        return data
 

	
 
    def __json__(self):
 
        data = dict(
 
        )
 
        data.update(self.get_api_data())
 
        return data
 
    ## SCM functions
 

	
 
    @property
 
    def scm_instance(self):
 
        from kallithea.lib.vcs import get_repo
 
        base_path = self.base_path()
 
        return get_repo(os.path.join(*map(safe_str,
 
                                          [base_path, self.gist_access_id])))
 

	
 

	
 
class DbMigrateVersion(Base, BaseModel):
 
    __tablename__ = 'db_migrate_version'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    repository_id = Column('repository_id', String(250), primary_key=True)
 
    repository_path = Column('repository_path', Text)
 
    version = Column('version', Integer)
kallithea/lib/dbmigrate/schema/db_2_0_1.py
Show inline comments
 
@@ -934,388 +934,388 @@ class Repository(Base, BaseModel):
 
        database using URL_SEP
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        return cls.url_sep().join(repo_name.split(os.sep))
 

	
 
    @classmethod
 
    def get_by_repo_name(cls, repo_name):
 
        q = Session().query(cls).filter(cls.repo_name == repo_name)
 
        q = q.options(joinedload(Repository.fork))\
 
                .options(joinedload(Repository.user))\
 
                .options(joinedload(Repository.group))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_full_path(cls, repo_full_path):
 
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
 
        repo_name = cls.normalize_repo_name(repo_name)
 
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
 

	
 
    @classmethod
 
    def get_repo_forks(cls, repo_id):
 
        return cls.query().filter(Repository.fork_id == repo_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns the base path where all repos are stored
 

	
 
        :param cls:
 
        """
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == cls.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def forks(self):
 
        """
 
        Return forks of this repo
 
        """
 
        return Repository.get_repo_forks(self.repo_id)
 

	
 
    @property
 
    def parent(self):
 
        """
 
        Returns fork parent
 
        """
 
        return self.fork
 

	
 
    @property
 
    def just_name(self):
 
        return self.repo_name.split(Repository.url_sep())[-1]
 

	
 
    @property
 
    def groups_with_parents(self):
 
        groups = []
 
        if self.group is None:
 
            return groups
 

	
 
        cur_gr = self.group
 
        groups.insert(0, cur_gr)
 
        while 1:
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            groups.insert(0, gr)
 

	
 
        return groups
 

	
 
    @property
 
    def groups_and_repo(self):
 
        return self.groups_with_parents, self.just_name, self.repo_name
 

	
 
    @LazyProperty
 
    def repo_path(self):
 
        """
 
        Returns the full base path for this repository, i.e. where it actually
 
        exists on a filesystem
 
        """
 
        q = Session().query(Ui).filter(Ui.ui_key ==
 
                                              Repository.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def repo_full_path(self):
 
        p = [self.repo_path]
 
        # we need to split the name by / since this is how we store the
 
        # names in the database, but that eventually needs to be converted
 
        # into a valid system path
 
        p += self.repo_name.split(Repository.url_sep())
 
        return os.path.join(*map(safe_unicode, p))
 

	
 
    @property
 
    def cache_keys(self):
 
        """
 
        Returns associated cache keys for that repo
 
        """
 
        return CacheInvalidation.query()\
 
            .filter(CacheInvalidation.cache_args == self.repo_name)\
 
            .order_by(CacheInvalidation.cache_key)\
 
            .all()
 

	
 
    def get_new_name(self, repo_name):
 
        """
 
        returns the new full repository name based on the assigned group and new name
 

	
 
        :param group_name:
 
        """
 
        path_prefix = self.group.full_path_splitted if self.group else []
 
        return Repository.url_sep().join(path_prefix + [repo_name])
 

	
 
    @property
 
    def _ui(self):
 
        """
 
        Creates a db-based ui object for this repository
 
        """
 
        from kallithea.lib.utils import make_ui
 
        return make_ui('db', clear_session=False)
 

	
 
    @classmethod
 
    def is_valid(cls, repo_name):
 
        """
 
        returns True if given repo name is a valid filesystem repository
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        from kallithea.lib.utils import is_valid_repo
 

	
 
        return is_valid_repo(repo_name, cls.base_path())
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating repo api data
 

	
 
        """
 
        repo = self
 
        data = dict(
 
            repo_id=repo.repo_id,
 
            repo_name=repo.repo_name,
 
            repo_type=repo.repo_type,
 
            clone_uri=repo.clone_uri,
 
            private=repo.private,
 
            created_on=repo.created_on,
 
            description=repo.description,
 
            landing_rev=repo.landing_rev,
 
            owner=repo.user.username,
 
            fork_of=repo.fork.repo_name if repo.fork else None,
 
            enable_statistics=repo.enable_statistics,
 
            enable_locking=repo.enable_locking,
 
            enable_downloads=repo.enable_downloads,
 
            last_changeset=repo.changeset_cache,
 
            locked_by=User.get(self.locked[0]).get_api_data() \
 
                if self.locked[0] else None,
 
            locked_date=time_to_datetime(self.locked[1]) \
 
                if self.locked[1] else None
 
        )
 
        rc_config = Setting.get_app_settings()
 
        repository_fields = str2bool(rc_config.get('repository_fields'))
 
        if repository_fields:
 
            for f in self.extra_fields:
 
                data[f.field_key_prefixed] = f.field_value
 

	
 
        return data
 

	
 
    @classmethod
 
    def lock(cls, repo, user_id, lock_time=None):
 
        if not lock_time:
 
            lock_time = time.time()
 
        repo.locked = [user_id, lock_time]
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def unlock(cls, repo):
 
        repo.locked = None
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def getlock(cls, repo):
 
        return repo.locked
 

	
 
    @property
 
    def last_db_change(self):
 
        return self.updated_on
 

	
 
    def clone_url(self, **override):
 
        from pylons import url
 
        import kallithea.lib.helpers as h
 
        from urlparse import urlparse
 
        import urllib
 
        parsed_url = urlparse(url('home', qualified=True))
 
        parsed_url = urlparse(h.canonical_url('home'))
 
        default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
 
        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
 
        args = {
 
           'user': '',
 
           'pass': '',
 
           'scheme': parsed_url.scheme,
 
           'netloc': parsed_url.netloc,
 
           'prefix': decoded_path,
 
           'path': self.repo_name
 
        }
 

	
 
        args.update(override)
 
        return default_clone_uri % args
 

	
 
    #==========================================================================
 
    # SCM PROPERTIES
 
    #==========================================================================
 

	
 
    def get_changeset(self, rev=None):
 
        return get_changeset_safe(self.scm_instance, rev)
 

	
 
    def get_landing_changeset(self):
 
        """
 
        Returns landing changeset, or if that doesn't exist returns the tip
 
        """
 
        cs = self.get_changeset(self.landing_rev) or self.get_changeset()
 
        return cs
 

	
 
    def update_changeset_cache(self, cs_cache=None):
 
        """
 
        Update cache of last changeset for repository, keys should be::
 

	
 
            short_id
 
            raw_id
 
            revision
 
            message
 
            date
 
            author
 

	
 
        :param cs_cache:
 
        """
 
        from kallithea.lib.vcs.backends.base import BaseChangeset
 
        if cs_cache is None:
 
            cs_cache = EmptyChangeset()
 
            # use no-cache version here
 
            scm_repo = self.scm_instance_no_cache()
 
            if scm_repo:
 
                cs_cache = scm_repo.get_changeset()
 

	
 
        if isinstance(cs_cache, BaseChangeset):
 
            cs_cache = cs_cache.__json__()
 

	
 
        if (cs_cache != self.changeset_cache or not self.changeset_cache):
 
            _default = datetime.datetime.fromtimestamp(0)
 
            last_change = cs_cache.get('date') or _default
 
            log.debug('updated repo %s with new cs cache %s'
 
                      % (self.repo_name, cs_cache))
 
            self.updated_on = last_change
 
            self.changeset_cache = cs_cache
 
            Session().add(self)
 
            Session().commit()
 
        else:
 
            log.debug('Skipping repo:%s already with latest changes'
 
                      % self.repo_name)
 

	
 
    @property
 
    def tip(self):
 
        return self.get_changeset('tip')
 

	
 
    @property
 
    def author(self):
 
        return self.tip.author
 

	
 
    @property
 
    def last_change(self):
 
        return self.scm_instance.last_change
 

	
 
    def get_comments(self, revisions=None):
 
        """
 
        Returns comments for this repository grouped by revisions
 

	
 
        :param revisions: filter query by revisions only
 
        """
 
        cmts = ChangesetComment.query()\
 
            .filter(ChangesetComment.repo == self)
 
        if revisions:
 
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
 
        grouped = collections.defaultdict(list)
 
        for cmt in cmts.all():
 
            grouped[cmt.revision].append(cmt)
 
        return grouped
 

	
 
    def statuses(self, revisions=None):
 
        """
 
        Returns statuses for this repository
 

	
 
        :param revisions: list of revisions to get statuses for
 
        """
 

	
 
        statuses = ChangesetStatus.query()\
 
            .filter(ChangesetStatus.repo == self)\
 
            .filter(ChangesetStatus.version == 0)
 
        if revisions:
 
            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
 
        grouped = {}
 

	
 
        # maybe we have an open new pull request without a status?
 
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
 
        status_lbl = ChangesetStatus.get_status_lbl(stat)
 
        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
 
            for rev in pr.revisions:
 
                pr_id = pr.pull_request_id
 
                pr_repo = pr.other_repo.repo_name
 
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
 

	
 
        for stat in statuses.all():
 
            pr_id = pr_repo = None
 
            if stat.pull_request:
 
                pr_id = stat.pull_request.pull_request_id
 
                pr_repo = stat.pull_request.other_repo.repo_name
 
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
 
                                      pr_id, pr_repo]
 
        return grouped
 

	
 
    def _repo_size(self):
 
        from kallithea.lib import helpers as h
 
        log.debug('calculating repository size...')
 
        return h.format_byte_size(self.scm_instance.size)
 

	
 
    #==========================================================================
 
    # SCM CACHE INSTANCE
 
    #==========================================================================
 

	
 
    def set_invalidate(self):
 
        """
 
        Mark caches of this repo as invalid.
 
        """
 
        CacheInvalidation.set_invalidate(self.repo_name)
 

	
 
    def scm_instance_no_cache(self):
 
        return self.__get_instance()
 

	
 
    @property
 
    def scm_instance(self):
 
        import kallithea
 
        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
 
        if full_cache:
 
            return self.scm_instance_cached()
 
        return self.__get_instance()
 

	
 
    def scm_instance_cached(self, valid_cache_keys=None):
 
        @cache_region('long_term')
 
        def _c(repo_name):
 
            return self.__get_instance()
 
        rn = self.repo_name
 

	
 
        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
 
        if not valid:
 
            log.debug('Cache for %s invalidated, getting new object' % (rn))
 
            region_invalidate(_c, None, rn)
 
        else:
 
            log.debug('Getting obj for %s from cache' % (rn))
 
        return _c(rn)
 

	
 
    def __get_instance(self):
 
        repo_full_path = self.repo_full_path
 
        try:
 
            alias = get_scm(repo_full_path)[0]
 
            log.debug('Creating instance of %s repository from %s'
 
                      % (alias, repo_full_path))
 
            backend = get_backend(alias)
 
        except VCSError:
 
            log.error(traceback.format_exc())
 
            log.error('Perhaps this repository is in db and not in '
 
                      'filesystem run rescan repositories with '
 
                      '"destroy old data " option from admin panel')
 
            return
 

	
 
        if alias == 'hg':
 

	
 
            repo = backend(safe_str(repo_full_path), create=False,
 
                           baseui=self._ui)
 
            # skip hidden web repository
 
            if repo._get_hidden():
 
                return
 
        else:
 
            repo = backend(repo_full_path, create=False)
 

	
 
        return repo
 

	
 

	
 
class RepoGroup(Base, BaseModel):
 
@@ -2087,248 +2087,248 @@ class PullRequest(Base, BaseModel):
 
    # values for .status
 
    STATUS_NEW = u'new'
 
    STATUS_OPEN = u'open'
 
    STATUS_CLOSED = u'closed'
 

	
 
    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
 
    title = Column('title', Unicode(256), nullable=True)
 
    description = Column('description', UnicodeText(10240), nullable=True)
 
    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW) # only for closedness, not approve/reject/etc
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
 
    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
 
    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    org_ref = Column('org_ref', Unicode(256), nullable=False)
 
    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    other_ref = Column('other_ref', Unicode(256), nullable=False)
 

	
 
    @hybrid_property
 
    def revisions(self):
 
        return self._revisions.split(':')
 

	
 
    @revisions.setter
 
    def revisions(self, val):
 
        self._revisions = ':'.join(val)
 

	
 
    @property
 
    def org_ref_parts(self):
 
        return self.org_ref.split(':')
 

	
 
    @property
 
    def other_ref_parts(self):
 
        return self.other_ref.split(':')
 

	
 
    author = relationship('User', lazy='joined')
 
    reviewers = relationship('PullRequestReviewers',
 
                             cascade="all, delete, delete-orphan")
 
    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
 
    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
 
    statuses = relationship('ChangesetStatus')
 
    comments = relationship('ChangesetComment',
 
                             cascade="all, delete, delete-orphan")
 

	
 
    def is_closed(self):
 
        return self.status == self.STATUS_CLOSED
 

	
 
    @property
 
    def last_review_status(self):
 
        return self.statuses[-1].status if self.statuses else ''
 

	
 
    def __json__(self):
 
        return dict(
 
            revisions=self.revisions
 
        )
 

	
 

	
 
class PullRequestReviewers(Base, BaseModel):
 
    __tablename__ = 'pull_request_reviewers'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    def __init__(self, user=None, pull_request=None):
 
        self.user = user
 
        self.pull_request = pull_request
 

	
 
    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
 
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
 

	
 
    user = relationship('User')
 
    pull_request = relationship('PullRequest')
 

	
 

	
 
class Notification(Base, BaseModel):
 
    __tablename__ = 'notifications'
 
    __table_args__ = (
 
        Index('notification_type_idx', 'type'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    TYPE_CHANGESET_COMMENT = u'cs_comment'
 
    TYPE_MESSAGE = u'message'
 
    TYPE_MENTION = u'mention'
 
    TYPE_REGISTRATION = u'registration'
 
    TYPE_PULL_REQUEST = u'pull_request'
 
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
 

	
 
    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
 
    subject = Column('subject', Unicode(512), nullable=True)
 
    body = Column('body', UnicodeText(50000), nullable=True)
 
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    type_ = Column('type', Unicode(256))
 

	
 
    created_by_user = relationship('User')
 
    notifications_to_users = relationship('UserNotification', lazy='joined',
 
                                          cascade="all, delete, delete-orphan")
 

	
 
    @property
 
    def recipients(self):
 
        return [x.user for x in UserNotification.query()\
 
                .filter(UserNotification.notification == self)\
 
                .order_by(UserNotification.user_id.asc()).all()]
 

	
 
    @classmethod
 
    def create(cls, created_by, subject, body, recipients, type_=None):
 
        if type_ is None:
 
            type_ = Notification.TYPE_MESSAGE
 

	
 
        notification = cls()
 
        notification.created_by_user = created_by
 
        notification.subject = subject
 
        notification.body = body
 
        notification.type_ = type_
 
        notification.created_on = datetime.datetime.now()
 

	
 
        for u in recipients:
 
            assoc = UserNotification()
 
            assoc.notification = notification
 
            u.notifications.append(assoc)
 
        Session().add(notification)
 
        return notification
 

	
 
    @property
 
    def description(self):
 
        from kallithea.model.notification import NotificationModel
 
        return NotificationModel().make_description(self)
 

	
 

	
 
class UserNotification(Base, BaseModel):
 
    __tablename__ = 'user_to_notification'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'notification_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
 
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
 
    read = Column('read', Boolean, default=False)
 
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
 

	
 
    user = relationship('User', lazy="joined")
 
    notification = relationship('Notification', lazy="joined",
 
                                order_by=lambda: Notification.created_on.desc(),)
 

	
 
    def mark_as_read(self):
 
        self.read = True
 
        Session().add(self)
 

	
 

	
 
class Gist(Base, BaseModel):
 
    __tablename__ = 'gists'
 
    __table_args__ = (
 
        Index('g_gist_access_id_idx', 'gist_access_id'),
 
        Index('g_created_on_idx', 'created_on'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    GIST_PUBLIC = u'public'
 
    GIST_PRIVATE = u'private'
 

	
 
    gist_id = Column('gist_id', Integer(), primary_key=True)
 
    gist_access_id = Column('gist_access_id', Unicode(250))
 
    gist_description = Column('gist_description', UnicodeText(1024))
 
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
 
    gist_expires = Column('gist_expires', Float(53), nullable=False)
 
    gist_type = Column('gist_type', Unicode(128), nullable=False)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    owner = relationship('User')
 

	
 
    @classmethod
 
    def get_or_404(cls, id_):
 
        res = cls.query().filter(cls.gist_access_id == id_).scalar()
 
        if not res:
 
            raise HTTPNotFound
 
        return res
 

	
 
    @classmethod
 
    def get_by_access_id(cls, gist_access_id):
 
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
 

	
 
    def gist_url(self):
 
        import kallithea
 
        alias_url = kallithea.CONFIG.get('gist_alias_url')
 
        if alias_url:
 
            return alias_url.replace('{gistid}', self.gist_access_id)
 

	
 
        from pylons import url
 
        return url('gist', gist_id=self.gist_access_id, qualified=True)
 
        import kallithea.lib.helpers as h
 
        return h.canonical_url('gist', gist_id=self.gist_access_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns the base path where all gists are stored
 

	
 
        :param cls:
 
        """
 
        from kallithea.model.gist import GIST_STORE_LOC
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == URL_SEP)
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating gist related data for API
 
        """
 
        gist = self
 
        data = dict(
 
            gist_id=gist.gist_id,
 
            type=gist.gist_type,
 
            access_id=gist.gist_access_id,
 
            description=gist.gist_description,
 
            url=gist.gist_url(),
 
            expires=gist.gist_expires,
 
            created_on=gist.created_on,
 
        )
 
        return data
 

	
 
    def __json__(self):
 
        data = dict(
 
        )
 
        data.update(self.get_api_data())
 
        return data
 
    ## SCM functions
 

	
 
    @property
 
    def scm_instance(self):
 
        from kallithea.lib.vcs import get_repo
 
        base_path = self.base_path()
 
        return get_repo(os.path.join(*map(safe_str,
 
                                          [base_path, self.gist_access_id])))
 

	
 

	
 
class DbMigrateVersion(Base, BaseModel):
 
    __tablename__ = 'db_migrate_version'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    repository_id = Column('repository_id', String(250), primary_key=True)
 
    repository_path = Column('repository_path', Text)
 
    version = Column('version', Integer)
kallithea/lib/dbmigrate/schema/db_2_0_2.py
Show inline comments
 
@@ -951,388 +951,388 @@ class Repository(Base, BaseModel):
 
        database using URL_SEP
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        return cls.url_sep().join(repo_name.split(os.sep))
 

	
 
    @classmethod
 
    def get_by_repo_name(cls, repo_name):
 
        q = Session().query(cls).filter(cls.repo_name == repo_name)
 
        q = q.options(joinedload(Repository.fork))\
 
                .options(joinedload(Repository.user))\
 
                .options(joinedload(Repository.group))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_full_path(cls, repo_full_path):
 
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
 
        repo_name = cls.normalize_repo_name(repo_name)
 
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
 

	
 
    @classmethod
 
    def get_repo_forks(cls, repo_id):
 
        return cls.query().filter(Repository.fork_id == repo_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns base path where all repos are stored
 

	
 
        :param cls:
 
        """
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == cls.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def forks(self):
 
        """
 
        Return forks of this repo
 
        """
 
        return Repository.get_repo_forks(self.repo_id)
 

	
 
    @property
 
    def parent(self):
 
        """
 
        Returns fork parent
 
        """
 
        return self.fork
 

	
 
    @property
 
    def just_name(self):
 
        return self.repo_name.split(Repository.url_sep())[-1]
 

	
 
    @property
 
    def groups_with_parents(self):
 
        groups = []
 
        if self.group is None:
 
            return groups
 

	
 
        cur_gr = self.group
 
        groups.insert(0, cur_gr)
 
        while 1:
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            groups.insert(0, gr)
 

	
 
        return groups
 

	
 
    @property
 
    def groups_and_repo(self):
 
        return self.groups_with_parents, self.just_name, self.repo_name
 

	
 
    @LazyProperty
 
    def repo_path(self):
 
        """
 
        Returns base full path for that repository means where it actually
 
        exists on a filesystem
 
        """
 
        q = Session().query(Ui).filter(Ui.ui_key ==
 
                                              Repository.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def repo_full_path(self):
 
        p = [self.repo_path]
 
        # we need to split the name by / since this is how we store the
 
        # names in the database, but that eventually needs to be converted
 
        # into a valid system path
 
        p += self.repo_name.split(Repository.url_sep())
 
        return os.path.join(*map(safe_unicode, p))
 

	
 
    @property
 
    def cache_keys(self):
 
        """
 
        Returns associated cache keys for that repo
 
        """
 
        return CacheInvalidation.query()\
 
            .filter(CacheInvalidation.cache_args == self.repo_name)\
 
            .order_by(CacheInvalidation.cache_key)\
 
            .all()
 

	
 
    def get_new_name(self, repo_name):
 
        """
 
        returns new full repository name based on assigned group and new name
 

	
 
        :param group_name:
 
        """
 
        path_prefix = self.group.full_path_splitted if self.group else []
 
        return Repository.url_sep().join(path_prefix + [repo_name])
 

	
 
    @property
 
    def _ui(self):
 
        """
 
        Creates a db based ui object for this repository
 
        """
 
        from kallithea.lib.utils import make_ui
 
        return make_ui('db', clear_session=False)
 

	
 
    @classmethod
 
    def is_valid(cls, repo_name):
 
        """
 
        returns True if given repo name is a valid filesystem repository
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        from kallithea.lib.utils import is_valid_repo
 

	
 
        return is_valid_repo(repo_name, cls.base_path())
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating repo api data
 

	
 
        """
 
        repo = self
 
        data = dict(
 
            repo_id=repo.repo_id,
 
            repo_name=repo.repo_name,
 
            repo_type=repo.repo_type,
 
            clone_uri=repo.clone_uri,
 
            private=repo.private,
 
            created_on=repo.created_on,
 
            description=repo.description,
 
            landing_rev=repo.landing_rev,
 
            owner=repo.user.username,
 
            fork_of=repo.fork.repo_name if repo.fork else None,
 
            enable_statistics=repo.enable_statistics,
 
            enable_locking=repo.enable_locking,
 
            enable_downloads=repo.enable_downloads,
 
            last_changeset=repo.changeset_cache,
 
            locked_by=User.get(self.locked[0]).get_api_data() \
 
                if self.locked[0] else None,
 
            locked_date=time_to_datetime(self.locked[1]) \
 
                if self.locked[1] else None
 
        )
 
        rc_config = Setting.get_app_settings()
 
        repository_fields = str2bool(rc_config.get('repository_fields'))
 
        if repository_fields:
 
            for f in self.extra_fields:
 
                data[f.field_key_prefixed] = f.field_value
 

	
 
        return data
 

	
 
    @classmethod
 
    def lock(cls, repo, user_id, lock_time=None):
 
        if not lock_time:
 
            lock_time = time.time()
 
        repo.locked = [user_id, lock_time]
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def unlock(cls, repo):
 
        repo.locked = None
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def getlock(cls, repo):
 
        return repo.locked
 

	
 
    @property
 
    def last_db_change(self):
 
        return self.updated_on
 

	
 
    def clone_url(self, **override):
 
        from pylons import url
 
        import kallithea.lib.helpers as h
 
        from urlparse import urlparse
 
        import urllib
 
        parsed_url = urlparse(url('home', qualified=True))
 
        parsed_url = urlparse(h.canonical_url('home'))
 
        default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
 
        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
 
        args = {
 
           'user': '',
 
           'pass': '',
 
           'scheme': parsed_url.scheme,
 
           'netloc': parsed_url.netloc,
 
           'prefix': decoded_path,
 
           'path': self.repo_name
 
        }
 

	
 
        args.update(override)
 
        return default_clone_uri % args
 

	
 
    #==========================================================================
 
    # SCM PROPERTIES
 
    #==========================================================================
 

	
 
    def get_changeset(self, rev=None):
 
        return get_changeset_safe(self.scm_instance, rev)
 

	
 
    def get_landing_changeset(self):
 
        """
 
        Returns landing changeset, or if that doesn't exist returns the tip
 
        """
 
        _rev_type, _rev = self.landing_rev
 
        cs = self.get_changeset(_rev)
 
        if isinstance(cs, EmptyChangeset):
 
            return self.get_changeset()
 
        return cs
 

	
 
    def update_changeset_cache(self, cs_cache=None):
 
        """
 
        Update cache of last changeset for repository, keys should be::
 

	
 
            short_id
 
            raw_id
 
            revision
 
            message
 
            date
 
            author
 

	
 
        :param cs_cache:
 
        """
 
        from kallithea.lib.vcs.backends.base import BaseChangeset
 
        if cs_cache is None:
 
            cs_cache = EmptyChangeset()
 
            # use no-cache version here
 
            scm_repo = self.scm_instance_no_cache()
 
            if scm_repo:
 
                cs_cache = scm_repo.get_changeset()
 

	
 
        if isinstance(cs_cache, BaseChangeset):
 
            cs_cache = cs_cache.__json__()
 

	
 
        if (cs_cache != self.changeset_cache or not self.changeset_cache):
 
            _default = datetime.datetime.fromtimestamp(0)
 
            last_change = cs_cache.get('date') or _default
 
            log.debug('updated repo %s with new cs cache %s'
 
                      % (self.repo_name, cs_cache))
 
            self.updated_on = last_change
 
            self.changeset_cache = cs_cache
 
            Session().add(self)
 
            Session().commit()
 
        else:
 
            log.debug('Skipping repo:%s already with latest changes'
 
                      % self.repo_name)
 

	
 
    @property
 
    def tip(self):
 
        return self.get_changeset('tip')
 

	
 
    @property
 
    def author(self):
 
        return self.tip.author
 

	
 
    @property
 
    def last_change(self):
 
        return self.scm_instance.last_change
 

	
 
    def get_comments(self, revisions=None):
 
        """
 
        Returns comments for this repository grouped by revisions
 

	
 
        :param revisions: filter query by revisions only
 
        """
 
        cmts = ChangesetComment.query()\
 
            .filter(ChangesetComment.repo == self)
 
        if revisions:
 
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
 
        grouped = collections.defaultdict(list)
 
        for cmt in cmts.all():
 
            grouped[cmt.revision].append(cmt)
 
        return grouped
 

	
 
    def statuses(self, revisions=None):
 
        """
 
        Returns statuses for this repository
 

	
 
        :param revisions: list of revisions to get statuses for
 
        """
 

	
 
        statuses = ChangesetStatus.query()\
 
            .filter(ChangesetStatus.repo == self)\
 
            .filter(ChangesetStatus.version == 0)
 
        if revisions:
 
            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
 
        grouped = {}
 

	
 
        #maybe we have open new pullrequest without a status ?
 
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
 
        status_lbl = ChangesetStatus.get_status_lbl(stat)
 
        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
 
            for rev in pr.revisions:
 
                pr_id = pr.pull_request_id
 
                pr_repo = pr.other_repo.repo_name
 
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
 

	
 
        for stat in statuses.all():
 
            pr_id = pr_repo = None
 
            if stat.pull_request:
 
                pr_id = stat.pull_request.pull_request_id
 
                pr_repo = stat.pull_request.other_repo.repo_name
 
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
 
                                      pr_id, pr_repo]
 
        return grouped
 

	
 
    def _repo_size(self):
 
        from kallithea.lib import helpers as h
 
        log.debug('calculating repository size...')
 
        return h.format_byte_size(self.scm_instance.size)
 

	
 
    #==========================================================================
 
    # SCM CACHE INSTANCE
 
    #==========================================================================
 

	
 
    def set_invalidate(self):
 
        """
 
        Mark caches of this repo as invalid.
 
        """
 
        CacheInvalidation.set_invalidate(self.repo_name)
 

	
 
    def scm_instance_no_cache(self):
 
        return self.__get_instance()
 

	
 
    @property
 
    def scm_instance(self):
 
        import kallithea
 
        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
 
        if full_cache:
 
            return self.scm_instance_cached()
 
        return self.__get_instance()
 

	
 
    def scm_instance_cached(self, valid_cache_keys=None):
 
        @cache_region('long_term')
 
        def _c(repo_name):
 
            return self.__get_instance()
 
        rn = self.repo_name
 

	
 
        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
 
        if not valid:
 
            log.debug('Cache for %s invalidated, getting new object' % (rn))
 
            region_invalidate(_c, None, rn)
 
        else:
 
            log.debug('Getting obj for %s from cache' % (rn))
 
        return _c(rn)
 

	
 
    def __get_instance(self):
 
        repo_full_path = self.repo_full_path
 
        try:
 
            alias = get_scm(repo_full_path)[0]
 
            log.debug('Creating instance of %s repository from %s'
 
                      % (alias, repo_full_path))
 
            backend = get_backend(alias)
 
        except VCSError:
 
            log.error(traceback.format_exc())
 
            log.error('Perhaps this repository is in db and not in '
 
                      'filesystem run rescan repositories with '
 
                      '"destroy old data " option from admin panel')
 
            return
 

	
 
        if alias == 'hg':
 

	
 
            repo = backend(safe_str(repo_full_path), create=False,
 
                           baseui=self._ui)
 
            # skip hidden web repository
 
            if repo._get_hidden():
 
                return
 
        else:
 
            repo = backend(repo_full_path, create=False)
 

	
 
        return repo
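
The clone_url method in the Repository hunk above now parses h.canonical_url('home') instead of the request-qualified home URL and feeds the pieces into the default_clone_uri template. The standalone snippet below only illustrates how that template expands; the canonical host and repository name are made up for the example:

    # Standalone illustration of the default_clone_uri expansion used above.
    # The host and repository name are assumptions for the example only.
    import urllib
    from urlparse import urlparse  # Python 2, as in the surrounding code

    parsed_url = urlparse('https://kallithea.example.com/')
    default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
    args = {
        'user': '',
        'pass': '',
        'scheme': parsed_url.scheme,                # 'https'
        'netloc': parsed_url.netloc,                # 'kallithea.example.com'
        'prefix': urllib.unquote(parsed_url.path),  # '/'
        'path': 'group/repo',
    }
    print default_clone_uri % args
    # https://kallithea.example.com/group/repo
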
 
@@ -2108,248 +2108,248 @@ class PullRequest(Base, BaseModel):
 
    # values for .status
 
    STATUS_NEW = u'new'
 
    STATUS_OPEN = u'open'
 
    STATUS_CLOSED = u'closed'
 

	
 
    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
 
    title = Column('title', Unicode(256), nullable=True)
 
    description = Column('description', UnicodeText(10240), nullable=True)
 
    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW) # only for closedness, not approve/reject/etc
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
 
    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
 
    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    org_ref = Column('org_ref', Unicode(256), nullable=False)
 
    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    other_ref = Column('other_ref', Unicode(256), nullable=False)
 

	
 
    @hybrid_property
 
    def revisions(self):
 
        return self._revisions.split(':')
 

	
 
    @revisions.setter
 
    def revisions(self, val):
 
        self._revisions = ':'.join(val)
 

	
 
    @property
 
    def org_ref_parts(self):
 
        return self.org_ref.split(':')
 

	
 
    @property
 
    def other_ref_parts(self):
 
        return self.other_ref.split(':')
 

	
 
    author = relationship('User', lazy='joined')
 
    reviewers = relationship('PullRequestReviewers',
 
                             cascade="all, delete, delete-orphan")
 
    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
 
    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
 
    statuses = relationship('ChangesetStatus')
 
    comments = relationship('ChangesetComment',
 
                             cascade="all, delete, delete-orphan")
 

	
 
    def is_closed(self):
 
        return self.status == self.STATUS_CLOSED
 

	
 
    @property
 
    def last_review_status(self):
 
        return self.statuses[-1].status if self.statuses else ''
 

	
 
    def __json__(self):
 
        return dict(
 
            revisions=self.revisions
 
        )
 

	
 

	
 
class PullRequestReviewers(Base, BaseModel):
 
    __tablename__ = 'pull_request_reviewers'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    def __init__(self, user=None, pull_request=None):
 
        self.user = user
 
        self.pull_request = pull_request
 

	
 
    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
 
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
 

	
 
    user = relationship('User')
 
    pull_request = relationship('PullRequest')
 

	
 

	
 
class Notification(Base, BaseModel):
 
    __tablename__ = 'notifications'
 
    __table_args__ = (
 
        Index('notification_type_idx', 'type'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    TYPE_CHANGESET_COMMENT = u'cs_comment'
 
    TYPE_MESSAGE = u'message'
 
    TYPE_MENTION = u'mention'
 
    TYPE_REGISTRATION = u'registration'
 
    TYPE_PULL_REQUEST = u'pull_request'
 
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
 

	
 
    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
 
    subject = Column('subject', Unicode(512), nullable=True)
 
    body = Column('body', UnicodeText(50000), nullable=True)
 
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    type_ = Column('type', Unicode(256))
 

	
 
    created_by_user = relationship('User')
 
    notifications_to_users = relationship('UserNotification', lazy='joined',
 
                                          cascade="all, delete, delete-orphan")
 

	
 
    @property
 
    def recipients(self):
 
        return [x.user for x in UserNotification.query()\
 
                .filter(UserNotification.notification == self)\
 
                .order_by(UserNotification.user_id.asc()).all()]
 

	
 
    @classmethod
 
    def create(cls, created_by, subject, body, recipients, type_=None):
 
        if type_ is None:
 
            type_ = Notification.TYPE_MESSAGE
 

	
 
        notification = cls()
 
        notification.created_by_user = created_by
 
        notification.subject = subject
 
        notification.body = body
 
        notification.type_ = type_
 
        notification.created_on = datetime.datetime.now()
 

	
 
        for u in recipients:
 
            assoc = UserNotification()
 
            assoc.notification = notification
 
            u.notifications.append(assoc)
 
        Session().add(notification)
 
        return notification
 

	
 
    @property
 
    def description(self):
 
        from kallithea.model.notification import NotificationModel
 
        return NotificationModel().make_description(self)
 

	
 

	
 
class UserNotification(Base, BaseModel):
 
    __tablename__ = 'user_to_notification'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'notification_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
 
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
 
    read = Column('read', Boolean, default=False)
 
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
 

	
 
    user = relationship('User', lazy="joined")
 
    notification = relationship('Notification', lazy="joined",
 
                                order_by=lambda: Notification.created_on.desc(),)
 

	
 
    def mark_as_read(self):
 
        self.read = True
 
        Session().add(self)
 

	
 

	
 
class Gist(Base, BaseModel):
 
    __tablename__ = 'gists'
 
    __table_args__ = (
 
        Index('g_gist_access_id_idx', 'gist_access_id'),
 
        Index('g_created_on_idx', 'created_on'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    GIST_PUBLIC = u'public'
 
    GIST_PRIVATE = u'private'
 

	
 
    gist_id = Column('gist_id', Integer(), primary_key=True)
 
    gist_access_id = Column('gist_access_id', Unicode(250))
 
    gist_description = Column('gist_description', UnicodeText(1024))
 
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
 
    gist_expires = Column('gist_expires', Float(53), nullable=False)
 
    gist_type = Column('gist_type', Unicode(128), nullable=False)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    owner = relationship('User')
 

	
 
    @classmethod
 
    def get_or_404(cls, id_):
 
        res = cls.query().filter(cls.gist_access_id == id_).scalar()
 
        if not res:
 
            raise HTTPNotFound
 
        return res
 

	
 
    @classmethod
 
    def get_by_access_id(cls, gist_access_id):
 
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
 

	
 
    def gist_url(self):
 
        import kallithea
 
        alias_url = kallithea.CONFIG.get('gist_alias_url')
 
        if alias_url:
 
            return alias_url.replace('{gistid}', self.gist_access_id)
 

	
 
        from pylons import url
 
        return url('gist', gist_id=self.gist_access_id, qualified=True)
 
        import kallithea.lib.helpers as h
 
        return h.canonical_url('gist', gist_id=self.gist_access_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns base path where all gists are stored
 

	
 
        :param cls:
 
        """
 
        from kallithea.model.gist import GIST_STORE_LOC
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == URL_SEP)
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating gist related data for API
 
        """
 
        gist = self
 
        data = dict(
 
            gist_id=gist.gist_id,
 
            type=gist.gist_type,
 
            access_id=gist.gist_access_id,
 
            description=gist.gist_description,
 
            url=gist.gist_url(),
 
            expires=gist.gist_expires,
 
            created_on=gist.created_on,
 
        )
 
        return data
 

	
 
    def __json__(self):
 
        data = dict(
 
        )
 
        data.update(self.get_api_data())
 
        return data
 
    ## SCM functions
 

	
 
    @property
 
    def scm_instance(self):
 
        from kallithea.lib.vcs import get_repo
 
        base_path = self.base_path()
 
        return get_repo(os.path.join(*map(safe_str,
 
                                          [base_path, self.gist_access_id])))
 

	
 

	
 
class DbMigrateVersion(Base, BaseModel):
 
    __tablename__ = 'db_migrate_version'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    repository_id = Column('repository_id', String(250), primary_key=True)
 
    repository_path = Column('repository_path', Text)
 
    version = Column('version', Integer)
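
Aside from the URL changes, the PullRequest hunk above also shows how revisions are persisted: the hybrid revisions property joins the list with ':' into the _revisions column and splits it again on read. A minimal round-trip illustration, with made-up revision ids:

    # Round trip of the colon-separated storage used by PullRequest.revisions.
    # The revision ids are made-up examples.
    revs = [u'a1b2c3', u'd4e5f6', u'0a1b2c']
    stored = u':'.join(revs)           # value written to the 'revisions' column
    assert stored.split(u':') == revs  # value returned by the property getter
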
kallithea/lib/dbmigrate/schema/db_2_1_0.py
Show inline comments
 
@@ -990,388 +990,388 @@ class Repository(Base, BaseModel):
 
        database using URL_SEP
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        return cls.url_sep().join(repo_name.split(os.sep))
 

	
 
    @classmethod
 
    def get_by_repo_name(cls, repo_name):
 
        q = Session().query(cls).filter(cls.repo_name == repo_name)
 
        q = q.options(joinedload(Repository.fork))\
 
                .options(joinedload(Repository.user))\
 
                .options(joinedload(Repository.group))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_full_path(cls, repo_full_path):
 
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
 
        repo_name = cls.normalize_repo_name(repo_name)
 
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
 

	
 
    @classmethod
 
    def get_repo_forks(cls, repo_id):
 
        return cls.query().filter(Repository.fork_id == repo_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns base path where all repos are stored
 

	
 
        :param cls:
 
        """
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == cls.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def forks(self):
 
        """
 
        Return forks of this repo
 
        """
 
        return Repository.get_repo_forks(self.repo_id)
 

	
 
    @property
 
    def parent(self):
 
        """
 
        Returns fork parent
 
        """
 
        return self.fork
 

	
 
    @property
 
    def just_name(self):
 
        return self.repo_name.split(Repository.url_sep())[-1]
 

	
 
    @property
 
    def groups_with_parents(self):
 
        groups = []
 
        if self.group is None:
 
            return groups
 

	
 
        cur_gr = self.group
 
        groups.insert(0, cur_gr)
 
        while 1:
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            groups.insert(0, gr)
 

	
 
        return groups
 

	
 
    @property
 
    def groups_and_repo(self):
 
        return self.groups_with_parents, self.just_name, self.repo_name
 

	
 
    @LazyProperty
 
    def repo_path(self):
 
        """
 
        Returns base full path for that repository means where it actually
 
        exists on a filesystem
 
        """
 
        q = Session().query(Ui).filter(Ui.ui_key ==
 
                                              Repository.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def repo_full_path(self):
 
        p = [self.repo_path]
 
        # we need to split the name by / since this is how we store the
 
        # names in the database, but that eventually needs to be converted
 
        # into a valid system path
 
        p += self.repo_name.split(Repository.url_sep())
 
        return os.path.join(*map(safe_unicode, p))
 

	
 
    @property
 
    def cache_keys(self):
 
        """
 
        Returns associated cache keys for that repo
 
        """
 
        return CacheInvalidation.query()\
 
            .filter(CacheInvalidation.cache_args == self.repo_name)\
 
            .order_by(CacheInvalidation.cache_key)\
 
            .all()
 

	
 
    def get_new_name(self, repo_name):
 
        """
 
        returns new full repository name based on assigned group and new name
 

	
 
        :param group_name:
 
        """
 
        path_prefix = self.group.full_path_splitted if self.group else []
 
        return Repository.url_sep().join(path_prefix + [repo_name])
 

	
 
    @property
 
    def _ui(self):
 
        """
 
        Creates a db based ui object for this repository
 
        """
 
        from kallithea.lib.utils import make_ui
 
        return make_ui('db', clear_session=False)
 

	
 
    @classmethod
 
    def is_valid(cls, repo_name):
 
        """
 
        returns True if given repo name is a valid filesystem repository
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        from kallithea.lib.utils import is_valid_repo
 

	
 
        return is_valid_repo(repo_name, cls.base_path())
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating repo api data
 

	
 
        """
 
        repo = self
 
        data = dict(
 
            repo_id=repo.repo_id,
 
            repo_name=repo.repo_name,
 
            repo_type=repo.repo_type,
 
            clone_uri=repo.clone_uri,
 
            private=repo.private,
 
            created_on=repo.created_on,
 
            description=repo.description,
 
            landing_rev=repo.landing_rev,
 
            owner=repo.user.username,
 
            fork_of=repo.fork.repo_name if repo.fork else None,
 
            enable_statistics=repo.enable_statistics,
 
            enable_locking=repo.enable_locking,
 
            enable_downloads=repo.enable_downloads,
 
            last_changeset=repo.changeset_cache,
 
            locked_by=User.get(self.locked[0]).get_api_data() \
 
                if self.locked[0] else None,
 
            locked_date=time_to_datetime(self.locked[1]) \
 
                if self.locked[1] else None
 
        )
 
        rc_config = Setting.get_app_settings()
 
        repository_fields = str2bool(rc_config.get('repository_fields'))
 
        if repository_fields:
 
            for f in self.extra_fields:
 
                data[f.field_key_prefixed] = f.field_value
 

	
 
        return data
 

	
 
    @classmethod
 
    def lock(cls, repo, user_id, lock_time=None):
 
        if not lock_time:
 
            lock_time = time.time()
 
        repo.locked = [user_id, lock_time]
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def unlock(cls, repo):
 
        repo.locked = None
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def getlock(cls, repo):
 
        return repo.locked
 

	
 
    @property
 
    def last_db_change(self):
 
        return self.updated_on
 

	
 
    def clone_url(self, **override):
 
        from pylons import url
 
        import kallithea.lib.helpers as h
 
        from urlparse import urlparse
 
        import urllib
 
        parsed_url = urlparse(url('home', qualified=True))
 
        parsed_url = urlparse(h.canonical_url('home'))
 
        default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
 
        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
 
        args = {
 
           'user': '',
 
           'pass': '',
 
           'scheme': parsed_url.scheme,
 
           'netloc': parsed_url.netloc,
 
           'prefix': decoded_path,
 
           'path': self.repo_name
 
        }
 

	
 
        args.update(override)
 
        return default_clone_uri % args
 

	
 
    #==========================================================================
 
    # SCM PROPERTIES
 
    #==========================================================================
 

	
 
    def get_changeset(self, rev=None):
 
        return get_changeset_safe(self.scm_instance, rev)
 

	
 
    def get_landing_changeset(self):
 
        """
 
        Returns landing changeset, or if that doesn't exist returns the tip
 
        """
 
        _rev_type, _rev = self.landing_rev
 
        cs = self.get_changeset(_rev)
 
        if isinstance(cs, EmptyChangeset):
 
            return self.get_changeset()
 
        return cs
 

	
 
    def update_changeset_cache(self, cs_cache=None):
 
        """
 
        Update cache of last changeset for repository, keys should be::
 

	
 
            short_id
 
            raw_id
 
            revision
 
            message
 
            date
 
            author
 

	
 
        :param cs_cache:
 
        """
 
        from kallithea.lib.vcs.backends.base import BaseChangeset
 
        if cs_cache is None:
 
            cs_cache = EmptyChangeset()
 
            # use no-cache version here
 
            scm_repo = self.scm_instance_no_cache()
 
            if scm_repo:
 
                cs_cache = scm_repo.get_changeset()
 

	
 
        if isinstance(cs_cache, BaseChangeset):
 
            cs_cache = cs_cache.__json__()
 

	
 
        if (cs_cache != self.changeset_cache or not self.changeset_cache):
 
            _default = datetime.datetime.fromtimestamp(0)
 
            last_change = cs_cache.get('date') or _default
 
            log.debug('updated repo %s with new cs cache %s'
 
                      % (self.repo_name, cs_cache))
 
            self.updated_on = last_change
 
            self.changeset_cache = cs_cache
 
            Session().add(self)
 
            Session().commit()
 
        else:
 
            log.debug('Skipping repo:%s already with latest changes'
 
                      % self.repo_name)
 

	
 
    @property
 
    def tip(self):
 
        return self.get_changeset('tip')
 

	
 
    @property
 
    def author(self):
 
        return self.tip.author
 

	
 
    @property
 
    def last_change(self):
 
        return self.scm_instance.last_change
 

	
 
    def get_comments(self, revisions=None):
 
        """
 
        Returns comments for this repository grouped by revisions
 

	
 
        :param revisions: filter query by revisions only
 
        """
 
        cmts = ChangesetComment.query()\
 
            .filter(ChangesetComment.repo == self)
 
        if revisions:
 
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
 
        grouped = collections.defaultdict(list)
 
        for cmt in cmts.all():
 
            grouped[cmt.revision].append(cmt)
 
        return grouped
 

	
 
    def statuses(self, revisions=None):
 
        """
 
        Returns statuses for this repository
 

	
 
        :param revisions: list of revisions to get statuses for
 
        """
 

	
 
        statuses = ChangesetStatus.query()\
 
            .filter(ChangesetStatus.repo == self)\
 
            .filter(ChangesetStatus.version == 0)
 
        if revisions:
 
            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
 
        grouped = {}
 

	
 
        #maybe we have open new pullrequest without a status ?
 
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
 
        status_lbl = ChangesetStatus.get_status_lbl(stat)
 
        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
 
            for rev in pr.revisions:
 
                pr_id = pr.pull_request_id
 
                pr_repo = pr.other_repo.repo_name
 
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
 

	
 
        for stat in statuses.all():
 
            pr_id = pr_repo = None
 
            if stat.pull_request:
 
                pr_id = stat.pull_request.pull_request_id
 
                pr_repo = stat.pull_request.other_repo.repo_name
 
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
 
                                      pr_id, pr_repo]
 
        return grouped
 

	
 
    def _repo_size(self):
 
        from kallithea.lib import helpers as h
 
        log.debug('calculating repository size...')
 
        return h.format_byte_size(self.scm_instance.size)
 

	
 
    #==========================================================================
 
    # SCM CACHE INSTANCE
 
    #==========================================================================
 

	
 
    def set_invalidate(self):
 
        """
 
        Mark caches of this repo as invalid.
 
        """
 
        CacheInvalidation.set_invalidate(self.repo_name)
 

	
 
    def scm_instance_no_cache(self):
 
        return self.__get_instance()
 

	
 
    @property
 
    def scm_instance(self):
 
        import kallithea
 
        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
 
        if full_cache:
 
            return self.scm_instance_cached()
 
        return self.__get_instance()
 

	
 
    def scm_instance_cached(self, valid_cache_keys=None):
 
        @cache_region('long_term')
 
        def _c(repo_name):
 
            return self.__get_instance()
 
        rn = self.repo_name
 

	
 
        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
 
        if not valid:
 
            log.debug('Cache for %s invalidated, getting new object' % (rn))
 
            region_invalidate(_c, None, rn)
 
        else:
 
            log.debug('Getting obj for %s from cache' % (rn))
 
        return _c(rn)
 

	
 
    def __get_instance(self):
 
        repo_full_path = self.repo_full_path
 
        try:
 
            alias = get_scm(repo_full_path)[0]
 
            log.debug('Creating instance of %s repository from %s'
 
                      % (alias, repo_full_path))
 
            backend = get_backend(alias)
 
        except VCSError:
 
            log.error(traceback.format_exc())
 
            log.error('Perhaps this repository is in db and not in '
 
                      'filesystem run rescan repositories with '
 
                      '"destroy old data " option from admin panel')
 
            return
 

	
 
        if alias == 'hg':
 

	
 
            repo = backend(safe_str(repo_full_path), create=False,
 
                           baseui=self._ui)
 
            # skip hidden web repository
 
            if repo._get_hidden():
 
                return
 
        else:
 
            repo = backend(repo_full_path, create=False)
 

	
 
        return repo
 
@@ -2147,248 +2147,248 @@ class PullRequest(Base, BaseModel):
 
    # values for .status
 
    STATUS_NEW = u'new'
 
    STATUS_OPEN = u'open'
 
    STATUS_CLOSED = u'closed'
 

	
 
    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
 
    title = Column('title', Unicode(256), nullable=True)
 
    description = Column('description', UnicodeText(10240), nullable=True)
 
    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW) # only for closedness, not approve/reject/etc
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
 
    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
 
    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    org_ref = Column('org_ref', Unicode(256), nullable=False)
 
    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    other_ref = Column('other_ref', Unicode(256), nullable=False)
 

	
 
    @hybrid_property
 
    def revisions(self):
 
        return self._revisions.split(':')
 

	
 
    @revisions.setter
 
    def revisions(self, val):
 
        self._revisions = ':'.join(val)
 

	
 
    @property
 
    def org_ref_parts(self):
 
        return self.org_ref.split(':')
 

	
 
    @property
 
    def other_ref_parts(self):
 
        return self.other_ref.split(':')
 

	
 
    author = relationship('User', lazy='joined')
 
    reviewers = relationship('PullRequestReviewers',
 
                             cascade="all, delete, delete-orphan")
 
    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
 
    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
 
    statuses = relationship('ChangesetStatus')
 
    comments = relationship('ChangesetComment',
 
                             cascade="all, delete, delete-orphan")
 

	
 
    def is_closed(self):
 
        return self.status == self.STATUS_CLOSED
 

	
 
    @property
 
    def last_review_status(self):
 
        return self.statuses[-1].status if self.statuses else ''
 

	
 
    def __json__(self):
 
        return dict(
 
            revisions=self.revisions
 
        )
 

	
 

	
 
class PullRequestReviewers(Base, BaseModel):
 
    __tablename__ = 'pull_request_reviewers'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    def __init__(self, user=None, pull_request=None):
 
        self.user = user
 
        self.pull_request = pull_request
 

	
 
    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
 
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
 

	
 
    user = relationship('User')
 
    pull_request = relationship('PullRequest')
 

	
 

	
 
class Notification(Base, BaseModel):
 
    __tablename__ = 'notifications'
 
    __table_args__ = (
 
        Index('notification_type_idx', 'type'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    TYPE_CHANGESET_COMMENT = u'cs_comment'
 
    TYPE_MESSAGE = u'message'
 
    TYPE_MENTION = u'mention'
 
    TYPE_REGISTRATION = u'registration'
 
    TYPE_PULL_REQUEST = u'pull_request'
 
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
 

	
 
    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
 
    subject = Column('subject', Unicode(512), nullable=True)
 
    body = Column('body', UnicodeText(50000), nullable=True)
 
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    type_ = Column('type', Unicode(256))
 

	
 
    created_by_user = relationship('User')
 
    notifications_to_users = relationship('UserNotification', lazy='joined',
 
                                          cascade="all, delete, delete-orphan")
 

	
 
    @property
 
    def recipients(self):
 
        return [x.user for x in UserNotification.query()\
 
                .filter(UserNotification.notification == self)\
 
                .order_by(UserNotification.user_id.asc()).all()]
 

	
 
    @classmethod
 
    def create(cls, created_by, subject, body, recipients, type_=None):
 
        if type_ is None:
 
            type_ = Notification.TYPE_MESSAGE
 

	
 
        notification = cls()
 
        notification.created_by_user = created_by
 
        notification.subject = subject
 
        notification.body = body
 
        notification.type_ = type_
 
        notification.created_on = datetime.datetime.now()
 

	
 
        for u in recipients:
 
            assoc = UserNotification()
 
            assoc.notification = notification
 
            u.notifications.append(assoc)
 
        Session().add(notification)
 
        return notification
 

	
 
    @property
 
    def description(self):
 
        from kallithea.model.notification import NotificationModel
 
        return NotificationModel().make_description(self)
 

	
 

	
 
class UserNotification(Base, BaseModel):
 
    __tablename__ = 'user_to_notification'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'notification_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
 
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
 
    read = Column('read', Boolean, default=False)
 
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
 

	
 
    user = relationship('User', lazy="joined")
 
    notification = relationship('Notification', lazy="joined",
 
                                order_by=lambda: Notification.created_on.desc(),)
 

	
 
    def mark_as_read(self):
 
        self.read = True
 
        Session().add(self)
 

	
 

	
 
class Gist(Base, BaseModel):
 
    __tablename__ = 'gists'
 
    __table_args__ = (
 
        Index('g_gist_access_id_idx', 'gist_access_id'),
 
        Index('g_created_on_idx', 'created_on'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    GIST_PUBLIC = u'public'
 
    GIST_PRIVATE = u'private'
 

	
 
    gist_id = Column('gist_id', Integer(), primary_key=True)
 
    gist_access_id = Column('gist_access_id', Unicode(250))
 
    gist_description = Column('gist_description', UnicodeText(1024))
 
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
 
    gist_expires = Column('gist_expires', Float(53), nullable=False)
 
    gist_type = Column('gist_type', Unicode(128), nullable=False)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    owner = relationship('User')
 

	
 
    @classmethod
 
    def get_or_404(cls, id_):
 
        res = cls.query().filter(cls.gist_access_id == id_).scalar()
 
        if not res:
 
            raise HTTPNotFound
 
        return res
 

	
 
    @classmethod
 
    def get_by_access_id(cls, gist_access_id):
 
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
 

	
 
    def gist_url(self):
 
        import kallithea
 
        alias_url = kallithea.CONFIG.get('gist_alias_url')
 
        if alias_url:
 
            return alias_url.replace('{gistid}', self.gist_access_id)
 

	
 
        from pylons import url
 
        return url('gist', gist_id=self.gist_access_id, qualified=True)
 
        import kallithea.lib.helpers as h
 
        return h.canonical_url('gist', gist_id=self.gist_access_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns base path where all gists are stored
 

	
 
        :param cls:
 
        """
 
        from kallithea.model.gist import GIST_STORE_LOC
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == URL_SEP)
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating gist related data for API
 
        """
 
        gist = self
 
        data = dict(
 
            gist_id=gist.gist_id,
 
            type=gist.gist_type,
 
            access_id=gist.gist_access_id,
 
            description=gist.gist_description,
 
            url=gist.gist_url(),
 
            expires=gist.gist_expires,
 
            created_on=gist.created_on,
 
        )
 
        return data
 

	
 
    def __json__(self):
 
        data = dict(
 
        )
 
        data.update(self.get_api_data())
 
        return data
 
    ## SCM functions
 

	
 
    @property
 
    def scm_instance(self):
 
        from kallithea.lib.vcs import get_repo
 
        base_path = self.base_path()
 
        return get_repo(os.path.join(*map(safe_str,
 
                                          [base_path, self.gist_access_id])))
 

	
 

	
 
class DbMigrateVersion(Base, BaseModel):
 
    __tablename__ = 'db_migrate_version'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    repository_id = Column('repository_id', String(250), primary_key=True)
 
    repository_path = Column('repository_path', Text)
 
    version = Column('version', Integer)
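
In the db_2_2_0 schema that follows, clone_url no longer formats default_clone_uri by hand; it resolves a template (normally c.clone_uri_tmpl, else DEFAULT_CLONE_URI) and passes it to get_clone_url together with the canonical home URL. Below is a hedged sketch of how such a placeholder template might expand -- this is not the real get_clone_url, and the template and field values are assumptions for illustration:

    # Hedged sketch of placeholder expansion; not Kallithea's get_clone_url.
    # Template and field values below are assumptions for illustration only.
    def expand_clone_url(uri_tmpl, **fields):
        out = uri_tmpl
        for key, val in fields.items():
            out = out.replace('{%s}' % key, val)
        return out

    print expand_clone_url('{scheme}://{user}@{netloc}/{repo}',
                           scheme='https', user='jane',
                           netloc='kallithea.example.com', repo='group/repo')
    # https://jane@kallithea.example.com/group/repo
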
kallithea/lib/dbmigrate/schema/db_2_2_0.py
Show inline comments
 
@@ -1030,386 +1030,386 @@ class Repository(Base, BaseModel):
 
        database using URL_SEP
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        return cls.url_sep().join(repo_name.split(os.sep))
 

	
 
    @classmethod
 
    def get_by_repo_name(cls, repo_name):
 
        q = Session().query(cls).filter(cls.repo_name == repo_name)
 
        q = q.options(joinedload(Repository.fork))\
 
                .options(joinedload(Repository.user))\
 
                .options(joinedload(Repository.group))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_full_path(cls, repo_full_path):
 
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
 
        repo_name = cls.normalize_repo_name(repo_name)
 
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
 

	
 
    @classmethod
 
    def get_repo_forks(cls, repo_id):
 
        return cls.query().filter(Repository.fork_id == repo_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns base path where all repos are stored
 

	
 
        :param cls:
 
        """
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == cls.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def forks(self):
 
        """
 
        Return forks of this repo
 
        """
 
        return Repository.get_repo_forks(self.repo_id)
 

	
 
    @property
 
    def parent(self):
 
        """
 
        Returns fork parent
 
        """
 
        return self.fork
 

	
 
    @property
 
    def just_name(self):
 
        return self.repo_name.split(Repository.url_sep())[-1]
 

	
 
    @property
 
    def groups_with_parents(self):
 
        groups = []
 
        if self.group is None:
 
            return groups
 

	
 
        cur_gr = self.group
 
        groups.insert(0, cur_gr)
 
        while 1:
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            groups.insert(0, gr)
 

	
 
        return groups
 

	
 
    @property
 
    def groups_and_repo(self):
 
        return self.groups_with_parents, self.just_name, self.repo_name
 

	
 
    @LazyProperty
 
    def repo_path(self):
 
        """
 
        Returns base full path for that repository means where it actually
 
        exists on a filesystem
 
        """
 
        q = Session().query(Ui).filter(Ui.ui_key ==
 
                                              Repository.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def repo_full_path(self):
 
        p = [self.repo_path]
 
        # we need to split the name by / since this is how we store the
 
        # names in the database, but that eventually needs to be converted
 
        # into a valid system path
 
        p += self.repo_name.split(Repository.url_sep())
 
        return os.path.join(*map(safe_unicode, p))
 

	
 
    @property
 
    def cache_keys(self):
 
        """
 
        Returns associated cache keys for that repo
 
        """
 
        return CacheInvalidation.query()\
 
            .filter(CacheInvalidation.cache_args == self.repo_name)\
 
            .order_by(CacheInvalidation.cache_key)\
 
            .all()
 

	
 
    def get_new_name(self, repo_name):
 
        """
 
        returns new full repository name based on assigned group and new name
 

	
 
        :param group_name:
 
        """
 
        path_prefix = self.group.full_path_splitted if self.group else []
 
        return Repository.url_sep().join(path_prefix + [repo_name])
 

	
 
    @property
 
    def _ui(self):
 
        """
 
        Creates a db based ui object for this repository
 
        """
 
        from kallithea.lib.utils import make_ui
 
        return make_ui('db', clear_session=False)
 

	
 
    @classmethod
 
    def is_valid(cls, repo_name):
 
        """
 
        returns True if given repo name is a valid filesystem repository
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        from kallithea.lib.utils import is_valid_repo
 

	
 
        return is_valid_repo(repo_name, cls.base_path())
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating repo api data
 

	
 
        """
 
        repo = self
 
        data = dict(
 
            repo_id=repo.repo_id,
 
            repo_name=repo.repo_name,
 
            repo_type=repo.repo_type,
 
            clone_uri=repo.clone_uri,
 
            private=repo.private,
 
            created_on=repo.created_on,
 
            description=repo.description,
 
            landing_rev=repo.landing_rev,
 
            owner=repo.user.username,
 
            fork_of=repo.fork.repo_name if repo.fork else None,
 
            enable_statistics=repo.enable_statistics,
 
            enable_locking=repo.enable_locking,
 
            enable_downloads=repo.enable_downloads,
 
            last_changeset=repo.changeset_cache,
 
            locked_by=User.get(self.locked[0]).get_api_data() \
 
                if self.locked[0] else None,
 
            locked_date=time_to_datetime(self.locked[1]) \
 
                if self.locked[1] else None
 
        )
 
        rc_config = Setting.get_app_settings()
 
        repository_fields = str2bool(rc_config.get('repository_fields'))
 
        if repository_fields:
 
            for f in self.extra_fields:
 
                data[f.field_key_prefixed] = f.field_value
 

	
 
        return data
 

	
 
    @classmethod
 
    def lock(cls, repo, user_id, lock_time=None):
 
        if not lock_time:
 
            lock_time = time.time()
 
        repo.locked = [user_id, lock_time]
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def unlock(cls, repo):
 
        repo.locked = None
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def getlock(cls, repo):
 
        return repo.locked
 

	
 
    @property
 
    def last_db_change(self):
 
        return self.updated_on
 

	
 
    def clone_url(self, **override):
 
        from pylons import url
 
        qualified_home_url = url('home', qualified=True)
 
        import kallithea.lib.helpers as h
 
        qualified_home_url = h.canonical_url('home')
 

	
 
        uri_tmpl = None
 
        if 'uri_tmpl' in override:
 
            uri_tmpl = override['uri_tmpl']
 
            del override['uri_tmpl']
 

	
 
        # we didn't override our tmpl from **overrides
 
        if not uri_tmpl:
 
            uri_tmpl = self.DEFAULT_CLONE_URI
 
            try:
 
                from pylons import tmpl_context as c
 
                uri_tmpl = c.clone_uri_tmpl
 
            except Exception:
 
                # in any case if we call this outside of request context,
 
                # ie, not having tmpl_context set up
 
                pass
 

	
 
        return get_clone_url(uri_tmpl=uri_tmpl,
 
                             qualifed_home_url=qualified_home_url,
 
                             repo_name=self.repo_name,
 
                             repo_id=self.repo_id, **override)
 

	
 
    #==========================================================================
 
    # SCM PROPERTIES
 
    #==========================================================================
 

	
 
    def get_changeset(self, rev=None):
 
        return get_changeset_safe(self.scm_instance, rev)
 

	
 
    def get_landing_changeset(self):
 
        """
 
        Returns landing changeset, or if that doesn't exist returns the tip
 
        """
 
        _rev_type, _rev = self.landing_rev
 
        cs = self.get_changeset(_rev)
 
        if isinstance(cs, EmptyChangeset):
 
            return self.get_changeset()
 
        return cs
 

	
 
    def update_changeset_cache(self, cs_cache=None):
 
        """
 
        Update cache of last changeset for repository, keys should be::
 

	
 
            short_id
 
            raw_id
 
            revision
 
            message
 
            date
 
            author
 

	
 
        :param cs_cache:
 
        """
 
        from kallithea.lib.vcs.backends.base import BaseChangeset
 
        if cs_cache is None:
 
            cs_cache = EmptyChangeset()
 
            # use no-cache version here
 
            scm_repo = self.scm_instance_no_cache()
 
            if scm_repo:
 
                cs_cache = scm_repo.get_changeset()
 

	
 
        if isinstance(cs_cache, BaseChangeset):
 
            cs_cache = cs_cache.__json__()
 

	
 
        if (cs_cache != self.changeset_cache or not self.changeset_cache):
 
            _default = datetime.datetime.fromtimestamp(0)
 
            last_change = cs_cache.get('date') or _default
 
            log.debug('updated repo %s with new cs cache %s'
 
                      % (self.repo_name, cs_cache))
 
            self.updated_on = last_change
 
            self.changeset_cache = cs_cache
 
            Session().add(self)
 
            Session().commit()
 
        else:
 
            log.debug('Skipping repo:%s already with latest changes'
 
                      % self.repo_name)
 

	
 
    @property
 
    def tip(self):
 
        return self.get_changeset('tip')
 

	
 
    @property
 
    def author(self):
 
        return self.tip.author
 

	
 
    @property
 
    def last_change(self):
 
        return self.scm_instance.last_change
 

	
 
    def get_comments(self, revisions=None):
 
        """
 
        Returns comments for this repository grouped by revisions
 

	
 
        :param revisions: filter query by revisions only
 
        """
 
        cmts = ChangesetComment.query()\
 
            .filter(ChangesetComment.repo == self)
 
        if revisions:
 
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
 
        grouped = collections.defaultdict(list)
 
        for cmt in cmts.all():
 
            grouped[cmt.revision].append(cmt)
 
        return grouped
 

	
 
    def statuses(self, revisions=None):
 
        """
 
        Returns statuses for this repository
 

	
 
        :param revisions: list of revisions to get statuses for
 
        """
 

	
 
        statuses = ChangesetStatus.query()\
 
            .filter(ChangesetStatus.repo == self)\
 
            .filter(ChangesetStatus.version == 0)
 
        if revisions:
 
            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
 
        grouped = {}
 

	
 
        #maybe we have open new pullrequest without a status ?
 
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
 
        status_lbl = ChangesetStatus.get_status_lbl(stat)
 
        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
 
            for rev in pr.revisions:
 
                pr_id = pr.pull_request_id
 
                pr_repo = pr.other_repo.repo_name
 
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
 

	
 
        for stat in statuses.all():
 
            pr_id = pr_repo = None
 
            if stat.pull_request:
 
                pr_id = stat.pull_request.pull_request_id
 
                pr_repo = stat.pull_request.other_repo.repo_name
 
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
 
                                      pr_id, pr_repo]
 
        return grouped
 

	
 
    def _repo_size(self):
 
        from kallithea.lib import helpers as h
 
        log.debug('calculating repository size...')
 
        return h.format_byte_size(self.scm_instance.size)
 

	
 
    #==========================================================================
 
    # SCM CACHE INSTANCE
 
    #==========================================================================
 

	
 
    def set_invalidate(self):
 
        """
 
        Mark caches of this repo as invalid.
 
        """
 
        CacheInvalidation.set_invalidate(self.repo_name)
 

	
 
    def scm_instance_no_cache(self):
 
        return self.__get_instance()
 

	
 
    @property
 
    def scm_instance(self):
 
        import kallithea
 
        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
 
        if full_cache:
 
            return self.scm_instance_cached()
 
        return self.__get_instance()
 

	
 
    def scm_instance_cached(self, valid_cache_keys=None):
 
        @cache_region('long_term')
 
        def _c(repo_name):
 
            return self.__get_instance()
 
        rn = self.repo_name
 

	
 
        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
 
        if not valid:
 
            log.debug('Cache for %s invalidated, getting new object' % (rn))
 
            region_invalidate(_c, None, rn)
 
        else:
 
            log.debug('Getting obj for %s from cache' % (rn))
 
        return _c(rn)
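
The caching above is plain Beaker region caching plus an explicit invalidation
table: the repo object is cached per repository name in the 'long_term' region
and dropped once CacheInvalidation marks the key stale. A minimal standalone
sketch of the same pattern (the function and repository names are made up; it
assumes a 'long_term' cache region is configured in the application's ini):

    from beaker.cache import cache_region, region_invalidate

    @cache_region('long_term')
    def expensive_lookup(repo_name):
        print 'cache miss for %s' % repo_name    # only runs when not cached
        return repo_name.upper()

    expensive_lookup('myrepo')                   # computes and caches
    expensive_lookup('myrepo')                   # served from the cache
    region_invalidate(expensive_lookup, None, 'myrepo')  # drop the cached entry
    expensive_lookup('myrepo')                   # computes again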
 

	
 
    def __get_instance(self):
 
        repo_full_path = self.repo_full_path
 
        try:
 
            alias = get_scm(repo_full_path)[0]
 
            log.debug('Creating instance of %s repository from %s'
 
                      % (alias, repo_full_path))
 
            backend = get_backend(alias)
 
        except VCSError:
 
            log.error(traceback.format_exc())
 
            log.error('Perhaps this repository is in db and not in '
 
                      'filesystem; run rescan repositories with the '
 
                      '"destroy old data" option from admin panel')
 
            return
 

	
 
        if alias == 'hg':
 

	
 
            repo = backend(safe_str(repo_full_path), create=False,
 
@@ -2204,248 +2204,248 @@ class PullRequest(Base, BaseModel):
 

	
 
    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
 
    title = Column('title', Unicode(256), nullable=True)
 
    description = Column('description', UnicodeText(10240), nullable=True)
 
    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW) # only for closedness, not approve/reject/etc
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
 
    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
 
    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    org_ref = Column('org_ref', Unicode(256), nullable=False)
 
    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    other_ref = Column('other_ref', Unicode(256), nullable=False)
 

	
 
    @hybrid_property
 
    def revisions(self):
 
        return self._revisions.split(':')
 

	
 
    @revisions.setter
 
    def revisions(self, val):
 
        self._revisions = ':'.join(val)
 

	
 
    @property
 
    def org_ref_parts(self):
 
        return self.org_ref.split(':')
 

	
 
    @property
 
    def other_ref_parts(self):
 
        return self.other_ref.split(':')
 

	
 
    author = relationship('User', lazy='joined')
 
    reviewers = relationship('PullRequestReviewers',
 
                             cascade="all, delete, delete-orphan")
 
    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
 
    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
 
    statuses = relationship('ChangesetStatus')
 
    comments = relationship('ChangesetComment',
 
                             cascade="all, delete, delete-orphan")
 

	
 
    def is_closed(self):
 
        return self.status == self.STATUS_CLOSED
 

	
 
    @property
 
    def last_review_status(self):
 
        return self.statuses[-1].status if self.statuses else ''
 

	
 
    def __json__(self):
 
        return dict(
 
            revisions=self.revisions
 
        )
 

	
 

	
 
class PullRequestReviewers(Base, BaseModel):
 
    __tablename__ = 'pull_request_reviewers'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    def __init__(self, user=None, pull_request=None):
 
        self.user = user
 
        self.pull_request = pull_request
 

	
 
    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
 
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
 

	
 
    user = relationship('User')
 
    pull_request = relationship('PullRequest')
 

	
 

	
 
class Notification(Base, BaseModel):
 
    __tablename__ = 'notifications'
 
    __table_args__ = (
 
        Index('notification_type_idx', 'type'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    TYPE_CHANGESET_COMMENT = u'cs_comment'
 
    TYPE_MESSAGE = u'message'
 
    TYPE_MENTION = u'mention'
 
    TYPE_REGISTRATION = u'registration'
 
    TYPE_PULL_REQUEST = u'pull_request'
 
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
 

	
 
    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
 
    subject = Column('subject', Unicode(512), nullable=True)
 
    body = Column('body', UnicodeText(50000), nullable=True)
 
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    type_ = Column('type', Unicode(256))
 

	
 
    created_by_user = relationship('User')
 
    notifications_to_users = relationship('UserNotification', lazy='joined',
 
                                          cascade="all, delete, delete-orphan")
 

	
 
    @property
 
    def recipients(self):
 
        return [x.user for x in UserNotification.query()\
 
                .filter(UserNotification.notification == self)\
 
                .order_by(UserNotification.user_id.asc()).all()]
 

	
 
    @classmethod
 
    def create(cls, created_by, subject, body, recipients, type_=None):
 
        if type_ is None:
 
            type_ = Notification.TYPE_MESSAGE
 

	
 
        notification = cls()
 
        notification.created_by_user = created_by
 
        notification.subject = subject
 
        notification.body = body
 
        notification.type_ = type_
 
        notification.created_on = datetime.datetime.now()
 

	
 
        for u in recipients:
 
            assoc = UserNotification()
 
            assoc.notification = notification
 
            u.notifications.append(assoc)
 
        Session().add(notification)
 
        return notification
 

	
 
    @property
 
    def description(self):
 
        from kallithea.model.notification import NotificationModel
 
        return NotificationModel().make_description(self)
 

	
 

	
 
class UserNotification(Base, BaseModel):
 
    __tablename__ = 'user_to_notification'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'notification_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
 
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
 
    read = Column('read', Boolean, default=False)
 
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
 

	
 
    user = relationship('User', lazy="joined")
 
    notification = relationship('Notification', lazy="joined",
 
                                order_by=lambda: Notification.created_on.desc(),)
 

	
 
    def mark_as_read(self):
 
        self.read = True
 
        Session().add(self)
 

	
 

	
 
class Gist(Base, BaseModel):
 
    __tablename__ = 'gists'
 
    __table_args__ = (
 
        Index('g_gist_access_id_idx', 'gist_access_id'),
 
        Index('g_created_on_idx', 'created_on'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    GIST_PUBLIC = u'public'
 
    GIST_PRIVATE = u'private'
 
    DEFAULT_FILENAME = u'gistfile1.txt'
 

	
 
    gist_id = Column('gist_id', Integer(), primary_key=True)
 
    gist_access_id = Column('gist_access_id', Unicode(250))
 
    gist_description = Column('gist_description', UnicodeText(1024))
 
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
 
    gist_expires = Column('gist_expires', Float(53), nullable=False)
 
    gist_type = Column('gist_type', Unicode(128), nullable=False)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    owner = relationship('User')
 

	
 
    def __repr__(self):
 
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
 

	
 
    @classmethod
 
    def get_or_404(cls, id_):
 
        res = cls.query().filter(cls.gist_access_id == id_).scalar()
 
        if not res:
 
            raise HTTPNotFound
 
        return res
 

	
 
    @classmethod
 
    def get_by_access_id(cls, gist_access_id):
 
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
 

	
 
    def gist_url(self):
 
        import kallithea
 
        alias_url = kallithea.CONFIG.get('gist_alias_url')
 
        if alias_url:
 
            return alias_url.replace('{gistid}', self.gist_access_id)
 

	
 
        from pylons import url
 
        return url('gist', gist_id=self.gist_access_id, qualified=True)
 
        import kallithea.lib.helpers as h
 
        return h.canonical_url('gist', gist_id=self.gist_access_id)
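
In other words the gist URL is built in one of two ways (example values are
made up; the exact path comes from whatever the 'gist' route maps to):

    # with gist_alias_url = https://gist.example.com/{gistid} configured:
    #   'https://gist.example.com/{gistid}'.replace('{gistid}', '7d2a5f1b')
    #   -> 'https://gist.example.com/7d2a5f1b'
    #
    # without it, the canonical_url setting (or, if unset, the current request)
    # supplies protocol and host:
    #   h.canonical_url('gist', gist_id='7d2a5f1b')
    #   -> e.g. 'https://kallithea.example.com/_admin/gists/7d2a5f1b'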
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns base path where all gists are stored
 

	
 
        :param cls:
 
        """
 
        from kallithea.model.gist import GIST_STORE_LOC
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == URL_SEP)
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating gist related data for API
 
        """
 
        gist = self
 
        data = dict(
 
            gist_id=gist.gist_id,
 
            type=gist.gist_type,
 
            access_id=gist.gist_access_id,
 
            description=gist.gist_description,
 
            url=gist.gist_url(),
 
            expires=gist.gist_expires,
 
            created_on=gist.created_on,
 
        )
 
        return data
 

	
 
    def __json__(self):
 
        data = dict(
 
        )
 
        data.update(self.get_api_data())
 
        return data
 
    ## SCM functions
 

	
 
    @property
 
    def scm_instance(self):
 
        from kallithea.lib.vcs import get_repo
 
        base_path = self.base_path()
 
        return get_repo(os.path.join(*map(safe_str,
 
                                          [base_path, self.gist_access_id])))
 

	
 

	
 
class DbMigrateVersion(Base, BaseModel):
 
    __tablename__ = 'db_migrate_version'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    repository_id = Column('repository_id', String(250), primary_key=True)
 
    repository_path = Column('repository_path', Text)
 
    version = Column('version', Integer)
kallithea/lib/dbmigrate/schema/db_2_2_3.py
Show inline comments
 
@@ -1050,386 +1050,386 @@ class Repository(Base, BaseModel):
 
        database using URL_SEP
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        return cls.url_sep().join(repo_name.split(os.sep))
 

	
 
    @classmethod
 
    def get_by_repo_name(cls, repo_name):
 
        q = Session().query(cls).filter(cls.repo_name == repo_name)
 
        q = q.options(joinedload(Repository.fork))\
 
                .options(joinedload(Repository.user))\
 
                .options(joinedload(Repository.group))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_full_path(cls, repo_full_path):
 
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
 
        repo_name = cls.normalize_repo_name(repo_name)
 
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
 

	
 
    @classmethod
 
    def get_repo_forks(cls, repo_id):
 
        return cls.query().filter(Repository.fork_id == repo_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns base path where all repos are stored
 

	
 
        :param cls:
 
        """
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == cls.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def forks(self):
 
        """
 
        Return forks of this repo
 
        """
 
        return Repository.get_repo_forks(self.repo_id)
 

	
 
    @property
 
    def parent(self):
 
        """
 
        Returns fork parent
 
        """
 
        return self.fork
 

	
 
    @property
 
    def just_name(self):
 
        return self.repo_name.split(Repository.url_sep())[-1]
 

	
 
    @property
 
    def groups_with_parents(self):
 
        groups = []
 
        if self.group is None:
 
            return groups
 

	
 
        cur_gr = self.group
 
        groups.insert(0, cur_gr)
 
        while 1:
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            groups.insert(0, gr)
 

	
 
        return groups
 

	
 
    @property
 
    def groups_and_repo(self):
 
        return self.groups_with_parents, self.just_name, self.repo_name
 

	
 
    @LazyProperty
 
    def repo_path(self):
 
        """
 
        Returns base full path for that repository means where it actually
 
        exists on a filesystem
 
        """
 
        q = Session().query(Ui).filter(Ui.ui_key ==
 
                                              Repository.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def repo_full_path(self):
 
        p = [self.repo_path]
 
        # we need to split the name by / since this is how we store the
 
        # names in the database, but that eventually needs to be converted
 
        # into a valid system path
 
        p += self.repo_name.split(Repository.url_sep())
 
        return os.path.join(*map(safe_unicode, p))
 

	
 
    @property
 
    def cache_keys(self):
 
        """
 
        Returns associated cache keys for that repo
 
        """
 
        return CacheInvalidation.query()\
 
            .filter(CacheInvalidation.cache_args == self.repo_name)\
 
            .order_by(CacheInvalidation.cache_key)\
 
            .all()
 

	
 
    def get_new_name(self, repo_name):
 
        """
 
        returns new full repository name based on assigned group and new repo name
 

	
 
        :param group_name:
 
        """
 
        path_prefix = self.group.full_path_splitted if self.group else []
 
        return Repository.url_sep().join(path_prefix + [repo_name])
 

	
 
    @property
 
    def _ui(self):
 
        """
 
        Creates a db-based ui object for this repository
 
        """
 
        from kallithea.lib.utils import make_ui
 
        return make_ui('db', clear_session=False)
 

	
 
    @classmethod
 
    def is_valid(cls, repo_name):
 
        """
 
        returns True if given repo name is a valid filesystem repository
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        from kallithea.lib.utils import is_valid_repo
 

	
 
        return is_valid_repo(repo_name, cls.base_path())
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating repo api data
 

	
 
        """
 
        repo = self
 
        data = dict(
 
            repo_id=repo.repo_id,
 
            repo_name=repo.repo_name,
 
            repo_type=repo.repo_type,
 
            clone_uri=repo.clone_uri,
 
            private=repo.private,
 
            created_on=repo.created_on,
 
            description=repo.description,
 
            landing_rev=repo.landing_rev,
 
            owner=repo.user.username,
 
            fork_of=repo.fork.repo_name if repo.fork else None,
 
            enable_statistics=repo.enable_statistics,
 
            enable_locking=repo.enable_locking,
 
            enable_downloads=repo.enable_downloads,
 
            last_changeset=repo.changeset_cache,
 
            locked_by=User.get(self.locked[0]).get_api_data() \
 
                if self.locked[0] else None,
 
            locked_date=time_to_datetime(self.locked[1]) \
 
                if self.locked[1] else None
 
        )
 
        rc_config = Setting.get_app_settings()
 
        repository_fields = str2bool(rc_config.get('repository_fields'))
 
        if repository_fields:
 
            for f in self.extra_fields:
 
                data[f.field_key_prefixed] = f.field_value
 

	
 
        return data
 

	
 
    @classmethod
 
    def lock(cls, repo, user_id, lock_time=None):
 
        if not lock_time:
 
            lock_time = time.time()
 
        repo.locked = [user_id, lock_time]
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def unlock(cls, repo):
 
        repo.locked = None
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def getlock(cls, repo):
 
        return repo.locked
 

	
 
    @property
 
    def last_db_change(self):
 
        return self.updated_on
 

	
 
    def clone_url(self, **override):
 
        from pylons import url
 
        qualified_home_url = url('home', qualified=True)
 
        import kallithea.lib.helpers as h
 
        qualified_home_url = h.canonical_url('home')
 

	
 
        uri_tmpl = None
 
        if 'with_id' in override:
 
            uri_tmpl = self.DEFAULT_CLONE_URI_ID
 
            del override['with_id']
 

	
 
        if 'uri_tmpl' in override:
 
            uri_tmpl = override['uri_tmpl']
 
            del override['uri_tmpl']
 

	
 
        # we didn't override our tmpl from **overrides
 
        if not uri_tmpl:
 
            uri_tmpl = self.DEFAULT_CLONE_URI
 
            try:
 
                from pylons import tmpl_context as c
 
                uri_tmpl = c.clone_uri_tmpl
 
            except Exception:
 
                # in any case if we call this outside of request context,
 
                # ie, not having tmpl_context set up
 
                pass
 

	
 
        return get_clone_url(uri_tmpl=uri_tmpl,
 
                             qualifed_home_url=qualified_home_url,
 
                             repo_name=self.repo_name,
 
                             repo_id=self.repo_id, **override)
 

	
 
    def set_state(self, state):
 
        self.repo_state = state
 
        Session().add(self)
 
    #==========================================================================
 
    # SCM PROPERTIES
 
    #==========================================================================
 

	
 
    def get_changeset(self, rev=None):
 
        return get_changeset_safe(self.scm_instance, rev)
 

	
 
    def get_landing_changeset(self):
 
        """
 
        Returns landing changeset, or if that doesn't exist returns the tip
 
        """
 
        _rev_type, _rev = self.landing_rev
 
        cs = self.get_changeset(_rev)
 
        if isinstance(cs, EmptyChangeset):
 
            return self.get_changeset()
 
        return cs
 

	
 
    def update_changeset_cache(self, cs_cache=None):
 
        """
 
        Update cache of last changeset for repository, keys should be::
 

	
 
            short_id
 
            raw_id
 
            revision
 
            message
 
            date
 
            author
 

	
 
        :param cs_cache:
 
        """
 
        from kallithea.lib.vcs.backends.base import BaseChangeset
 
        if cs_cache is None:
 
            cs_cache = EmptyChangeset()
 
            # use no-cache version here
 
            scm_repo = self.scm_instance_no_cache()
 
            if scm_repo:
 
                cs_cache = scm_repo.get_changeset()
 

	
 
        if isinstance(cs_cache, BaseChangeset):
 
            cs_cache = cs_cache.__json__()
 

	
 
        if (cs_cache != self.changeset_cache or not self.changeset_cache):
 
            _default = datetime.datetime.fromtimestamp(0)
 
            last_change = cs_cache.get('date') or _default
 
            log.debug('updated repo %s with new cs cache %s'
 
                      % (self.repo_name, cs_cache))
 
            self.updated_on = last_change
 
            self.changeset_cache = cs_cache
 
            Session().add(self)
 
            Session().commit()
 
        else:
 
            log.debug('Skipping repo:%s already with latest changes'
 
                      % self.repo_name)
 

	
 
    @property
 
    def tip(self):
 
        return self.get_changeset('tip')
 

	
 
    @property
 
    def author(self):
 
        return self.tip.author
 

	
 
    @property
 
    def last_change(self):
 
        return self.scm_instance.last_change
 

	
 
    def get_comments(self, revisions=None):
 
        """
 
        Returns comments for this repository grouped by revisions
 

	
 
        :param revisions: filter query by revisions only
 
        """
 
        cmts = ChangesetComment.query()\
 
            .filter(ChangesetComment.repo == self)
 
        if revisions:
 
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
 
        grouped = collections.defaultdict(list)
 
        for cmt in cmts.all():
 
            grouped[cmt.revision].append(cmt)
 
        return grouped
 

	
 
    def statuses(self, revisions=None):
 
        """
 
        Returns statuses for this repository
 

	
 
        :param revisions: list of revisions to get statuses for
 
        """
 

	
 
        statuses = ChangesetStatus.query()\
 
            .filter(ChangesetStatus.repo == self)\
 
            .filter(ChangesetStatus.version == 0)
 
        if revisions:
 
            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
 
        grouped = {}
 

	
 
        #maybe we have open new pullrequest without a status ?
 
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
 
        status_lbl = ChangesetStatus.get_status_lbl(stat)
 
        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
 
            for rev in pr.revisions:
 
                pr_id = pr.pull_request_id
 
                pr_repo = pr.other_repo.repo_name
 
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
 

	
 
        for stat in statuses.all():
 
            pr_id = pr_repo = None
 
            if stat.pull_request:
 
                pr_id = stat.pull_request.pull_request_id
 
                pr_repo = stat.pull_request.other_repo.repo_name
 
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
 
                                      pr_id, pr_repo]
 
        return grouped
 

	
 
    def _repo_size(self):
 
        from kallithea.lib import helpers as h
 
        log.debug('calculating repository size...')
 
        return h.format_byte_size(self.scm_instance.size)
 

	
 
    #==========================================================================
 
    # SCM CACHE INSTANCE
 
    #==========================================================================
 

	
 
    def set_invalidate(self):
 
        """
 
        Mark caches of this repo as invalid.
 
        """
 
        CacheInvalidation.set_invalidate(self.repo_name)
 

	
 
    def scm_instance_no_cache(self):
 
        return self.__get_instance()
 

	
 
    @property
 
    def scm_instance(self):
 
        import kallithea
 
        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
 
        if full_cache:
 
            return self.scm_instance_cached()
 
        return self.__get_instance()
 

	
 
    def scm_instance_cached(self, valid_cache_keys=None):
 
        @cache_region('long_term')
 
        def _c(repo_name):
 
            return self.__get_instance()
 
        rn = self.repo_name
 

	
 
        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
 
        if not valid:
 
            log.debug('Cache for %s invalidated, getting new object' % (rn))
 
            region_invalidate(_c, None, rn)
 
        else:
 
            log.debug('Getting obj for %s from cache' % (rn))
 
        return _c(rn)
 

	
 
    def __get_instance(self):
 
        repo_full_path = self.repo_full_path
 
        try:
 
            alias = get_scm(repo_full_path)[0]
 
            log.debug('Creating instance of %s repository from %s'
 
                      % (alias, repo_full_path))
 
            backend = get_backend(alias)
 
        except VCSError:
 
            log.error(traceback.format_exc())
 
            log.error('Perhaps this repository is in db and not in '
 
@@ -2250,248 +2250,248 @@ class PullRequest(Base, BaseModel):
 

	
 
    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
 
    title = Column('title', Unicode(256), nullable=True)
 
    description = Column('description', UnicodeText(10240), nullable=True)
 
    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW) # only for closedness, not approve/reject/etc
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
 
    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
 
    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    org_ref = Column('org_ref', Unicode(256), nullable=False)
 
    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    other_ref = Column('other_ref', Unicode(256), nullable=False)
 

	
 
    @hybrid_property
 
    def revisions(self):
 
        return self._revisions.split(':')
 

	
 
    @revisions.setter
 
    def revisions(self, val):
 
        self._revisions = ':'.join(val)
 

	
 
    @property
 
    def org_ref_parts(self):
 
        return self.org_ref.split(':')
 

	
 
    @property
 
    def other_ref_parts(self):
 
        return self.other_ref.split(':')
 

	
 
    author = relationship('User', lazy='joined')
 
    reviewers = relationship('PullRequestReviewers',
 
                             cascade="all, delete, delete-orphan")
 
    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
 
    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
 
    statuses = relationship('ChangesetStatus')
 
    comments = relationship('ChangesetComment',
 
                             cascade="all, delete, delete-orphan")
 

	
 
    def is_closed(self):
 
        return self.status == self.STATUS_CLOSED
 

	
 
    @property
 
    def last_review_status(self):
 
        return self.statuses[-1].status if self.statuses else ''
 

	
 
    def __json__(self):
 
        return dict(
 
            revisions=self.revisions
 
        )
 

	
 

	
 
class PullRequestReviewers(Base, BaseModel):
 
    __tablename__ = 'pull_request_reviewers'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    def __init__(self, user=None, pull_request=None):
 
        self.user = user
 
        self.pull_request = pull_request
 

	
 
    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
 
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
 

	
 
    user = relationship('User')
 
    pull_request = relationship('PullRequest')
 

	
 

	
 
class Notification(Base, BaseModel):
 
    __tablename__ = 'notifications'
 
    __table_args__ = (
 
        Index('notification_type_idx', 'type'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    TYPE_CHANGESET_COMMENT = u'cs_comment'
 
    TYPE_MESSAGE = u'message'
 
    TYPE_MENTION = u'mention'
 
    TYPE_REGISTRATION = u'registration'
 
    TYPE_PULL_REQUEST = u'pull_request'
 
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
 

	
 
    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
 
    subject = Column('subject', Unicode(512), nullable=True)
 
    body = Column('body', UnicodeText(50000), nullable=True)
 
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    type_ = Column('type', Unicode(256))
 

	
 
    created_by_user = relationship('User')
 
    notifications_to_users = relationship('UserNotification', lazy='joined',
 
                                          cascade="all, delete, delete-orphan")
 

	
 
    @property
 
    def recipients(self):
 
        return [x.user for x in UserNotification.query()\
 
                .filter(UserNotification.notification == self)\
 
                .order_by(UserNotification.user_id.asc()).all()]
 

	
 
    @classmethod
 
    def create(cls, created_by, subject, body, recipients, type_=None):
 
        if type_ is None:
 
            type_ = Notification.TYPE_MESSAGE
 

	
 
        notification = cls()
 
        notification.created_by_user = created_by
 
        notification.subject = subject
 
        notification.body = body
 
        notification.type_ = type_
 
        notification.created_on = datetime.datetime.now()
 

	
 
        for u in recipients:
 
            assoc = UserNotification()
 
            assoc.notification = notification
 
            u.notifications.append(assoc)
 
        Session().add(notification)
 
        return notification
 

	
 
    @property
 
    def description(self):
 
        from kallithea.model.notification import NotificationModel
 
        return NotificationModel().make_description(self)
 

	
 

	
 
class UserNotification(Base, BaseModel):
 
    __tablename__ = 'user_to_notification'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'notification_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
 
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
 
    read = Column('read', Boolean, default=False)
 
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
 

	
 
    user = relationship('User', lazy="joined")
 
    notification = relationship('Notification', lazy="joined",
 
                                order_by=lambda: Notification.created_on.desc(),)
 

	
 
    def mark_as_read(self):
 
        self.read = True
 
        Session().add(self)
 

	
 

	
 
class Gist(Base, BaseModel):
 
    __tablename__ = 'gists'
 
    __table_args__ = (
 
        Index('g_gist_access_id_idx', 'gist_access_id'),
 
        Index('g_created_on_idx', 'created_on'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    GIST_PUBLIC = u'public'
 
    GIST_PRIVATE = u'private'
 
    DEFAULT_FILENAME = u'gistfile1.txt'
 

	
 
    gist_id = Column('gist_id', Integer(), primary_key=True)
 
    gist_access_id = Column('gist_access_id', Unicode(250))
 
    gist_description = Column('gist_description', UnicodeText(1024))
 
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
 
    gist_expires = Column('gist_expires', Float(53), nullable=False)
 
    gist_type = Column('gist_type', Unicode(128), nullable=False)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    owner = relationship('User')
 

	
 
    def __repr__(self):
 
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
 

	
 
    @classmethod
 
    def get_or_404(cls, id_):
 
        res = cls.query().filter(cls.gist_access_id == id_).scalar()
 
        if not res:
 
            raise HTTPNotFound
 
        return res
 

	
 
    @classmethod
 
    def get_by_access_id(cls, gist_access_id):
 
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
 

	
 
    def gist_url(self):
 
        import kallithea
 
        alias_url = kallithea.CONFIG.get('gist_alias_url')
 
        if alias_url:
 
            return alias_url.replace('{gistid}', self.gist_access_id)
 

	
 
        from pylons import url
 
        return url('gist', gist_id=self.gist_access_id, qualified=True)
 
        import kallithea.lib.helpers as h
 
        return h.canonical_url('gist', gist_id=self.gist_access_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns base path where all gists are stored
 

	
 
        :param cls:
 
        """
 
        from kallithea.model.gist import GIST_STORE_LOC
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == URL_SEP)
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating gist related data for API
 
        """
 
        gist = self
 
        data = dict(
 
            gist_id=gist.gist_id,
 
            type=gist.gist_type,
 
            access_id=gist.gist_access_id,
 
            description=gist.gist_description,
 
            url=gist.gist_url(),
 
            expires=gist.gist_expires,
 
            created_on=gist.created_on,
 
        )
 
        return data
 

	
 
    def __json__(self):
 
        data = dict(
 
        )
 
        data.update(self.get_api_data())
 
        return data
 
    ## SCM functions
 

	
 
    @property
 
    def scm_instance(self):
 
        from kallithea.lib.vcs import get_repo
 
        base_path = self.base_path()
 
        return get_repo(os.path.join(*map(safe_str,
 
                                          [base_path, self.gist_access_id])))
 

	
 

	
 
class DbMigrateVersion(Base, BaseModel):
 
    __tablename__ = 'db_migrate_version'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    repository_id = Column('repository_id', String(250), primary_key=True)
 
    repository_path = Column('repository_path', Text)
 
    version = Column('version', Integer)
kallithea/lib/helpers.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
Helper functions
 

	
 
Consists of functions to typically be used within templates, but also
 
available to Controllers. This module is available to both as 'h'.
 
"""
 
import random
 
import hashlib
 
import StringIO
 
import math
 
import logging
 
import re
 
import urlparse
 
import textwrap
 

	
 
from pygments.formatters.html import HtmlFormatter
 
from pygments import highlight as code_highlight
 
from pylons import url
 
from pylons.i18n.translation import _, ungettext
 
from hashlib import md5
 

	
 
from webhelpers.html import literal, HTML, escape
 
from webhelpers.html.tools import *
 
from webhelpers.html.builder import make_tag
 
from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
 
    end_form, file, form, hidden, image, javascript_link, link_to, \
 
    link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
 
    submit, text, password, textarea, title, ul, xml_declaration, radio
 
from webhelpers.html.tools import auto_link, button_to, highlight, \
 
    js_obfuscate, mail_to, strip_links, strip_tags, tag_re
 
from webhelpers.number import format_byte_size, format_bit_size
 
from webhelpers.pylonslib import Flash as _Flash
 
from webhelpers.pylonslib.secure_form import secure_form
 
from webhelpers.text import chop_at, collapse, convert_accented_entities, \
 
    convert_misc_entities, lchop, plural, rchop, remove_formatting, \
 
    replace_whitespace, urlify, truncate, wrap_paragraphs
 
from webhelpers.date import time_ago_in_words
 
from webhelpers.paginate import Page as _Page
 
from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
 
    convert_boolean_attrs, NotGiven, _make_safe_id_component
 

	
 
from kallithea.lib.annotate import annotate_highlight
 
from kallithea.lib.utils import repo_name_slug, get_custom_lexer
 
from kallithea.lib.utils2 import str2bool, safe_unicode, safe_str, \
 
    get_changeset_safe, datetime_to_time, time_to_datetime, AttributeDict,\
 
    safe_int
 
from kallithea.lib.markup_renderer import MarkupRenderer
 
from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError
 
from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
 
from kallithea.config.conf import DATE_FORMAT, DATETIME_FORMAT
 
from kallithea.model.changeset_status import ChangesetStatusModel
 
from kallithea.model.db import URL_SEP, Permission
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def canonical_url(*args, **kargs):
 
    '''Like url(x, qualified=True), but returns a url that not only is qualified
 
    but also canonical, as configured by the canonical_url setting'''
 
    from kallithea import CONFIG
 
    try:
 
        parts = CONFIG.get('canonical_url', '').split('://', 1)
 
        kargs['host'] = parts[1].split('/', 1)[0]
 
        kargs['protocol'] = parts[0]
 
    except IndexError:
 
        kargs['qualified'] = True
 
    return url(*args, **kargs)
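
For example, with the new setting in the configuration file (section name and
value are illustrative):

    # [app:main]
    # canonical_url = https://kallithea.example.com

    # 'https://kallithea.example.com'.split('://', 1)
    # -> ['https', 'kallithea.example.com']        # protocol, then host
    #
    # canonical_url('home') -> 'https://kallithea.example.com/'
    # regardless of which hostname or protocol the current request used.
    # If the setting is absent, parts[1] raises IndexError and the helper
    # falls back to a plain url(..., qualified=True) built from the request.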
 

	
 
def html_escape(text, html_escape_table=None):
 
    """Produce entities within text."""
 
    if not html_escape_table:
 
        html_escape_table = {
 
            "&": "&amp;",
 
            '"': "&quot;",
 
            "'": "&apos;",
 
            ">": "&gt;",
 
            "<": "&lt;",
 
        }
 
    return "".join(html_escape_table.get(c, c) for c in text)
 

	
 

	
 
def shorter(text, size=20):
 
    postfix = '...'
 
    if len(text) > size:
 
        return text[:size - len(postfix)] + postfix
 
    return text
 

	
 

	
 
def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
 
    """
 
    Reset button
 
    """
 
    _set_input_attrs(attrs, type, name, value)
 
    _set_id_attr(attrs, id, name)
 
    convert_boolean_attrs(attrs, ["disabled"])
 
    return HTML.input(**attrs)
 

	
 
reset = _reset
 
safeid = _make_safe_id_component
 

	
 

	
 
def FID(raw_id, path):
 
    """
 
    Creates a unique ID for a filenode based on a hash of its path and revision;
 
    it's safe to use in urls
 

	
 
    :param raw_id:
 
    :param path:
 
    """
 

	
 
    return 'C-%s-%s' % (short_id(raw_id), md5(safe_str(path)).hexdigest()[:12])
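
A quick illustration of the resulting shape (values are made up; it assumes
short_id() returns the abbreviated revision hash):

    # FID('f9bc28c44f30...', 'docs/setup.py')
    # -> 'C-f9bc28c44f30-0123456789ab'
    #     |  short_id     | first 12 hex chars of md5 of the path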
 

	
 

	
 
def get_token():
 
    """Return the current authentication token, creating one if one doesn't
 
    already exist.
 
    """
 
    token_key = "_authentication_token"
 
    from pylons import session
 
    if not token_key in session:
 
        try:
 
            token = hashlib.sha1(str(random.getrandbits(128))).hexdigest()
 
        except AttributeError:  # Python < 2.4
 
            token = hashlib.sha1(str(random.randrange(2 ** 128))).hexdigest()
 
        session[token_key] = token
 
        if hasattr(session, 'save'):
 
            session.save()
 
    return session[token_key]
 

	
 

	
 
class _GetError(object):
 
    """Get error from form_errors, and represent it as span wrapped error
 
    message
 

	
 
    :param field_name: field to fetch errors for
 
    :param form_errors: form errors dict
 
    """
 

	
 
    def __call__(self, field_name, form_errors):
 
        tmpl = """<span class="error_msg">%s</span>"""
 
        if form_errors and field_name in form_errors:
 
            return literal(tmpl % form_errors.get(field_name))
 

	
 
get_error = _GetError()
 

	
 

	
 
class _ToolTip(object):
 

	
 
    def __call__(self, tooltip_title, trim_at=50):
 
        """
 
        Special function just to wrap our text into nice formatted
 
        autowrapped text
 

	
 
        :param tooltip_title:
 
        """
 
        tooltip_title = escape(tooltip_title)
 
        tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
 
        return tooltip_title
 
tooltip = _ToolTip()
 

	
 

	
 
class _FilesBreadCrumbs(object):
 

	
 
    def __call__(self, repo_name, rev, paths):
 
        if isinstance(paths, str):
 
            paths = safe_unicode(paths)
 
        url_l = [link_to(repo_name, url('files_home',
 
                                        repo_name=repo_name,
 
                                        revision=rev, f_path=''),
 
                         class_='ypjax-link')]
 
        paths_l = paths.split('/')
 
        for cnt, p in enumerate(paths_l):
 
            if p != '':
 
                url_l.append(link_to(p,
 
                                     url('files_home',
 
                                         repo_name=repo_name,
 
                                         revision=rev,
 
                                         f_path='/'.join(paths_l[:cnt + 1])
 
                                         ),
 
                                     class_='ypjax-link'
 
                                     )
 
                             )
 

	
 
        return literal('/'.join(url_l))
 

	
 
files_breadcrumbs = _FilesBreadCrumbs()
 

	
 

	
 
class CodeHtmlFormatter(HtmlFormatter):
 
    """
 
    My code Html Formatter for source codes
 
    """
 

	
 
    def wrap(self, source, outfile):
 
        return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
 

	
 
    def _wrap_code(self, source):
 
        for cnt, it in enumerate(source):
 
            i, t = it
 
            t = '<div id="L%s">%s</div>' % (cnt + 1, t)
 
            yield i, t
 

	
 
    def _wrap_tablelinenos(self, inner):
 
        dummyoutfile = StringIO.StringIO()
 
        lncount = 0
 
        for t, line in inner:
 
            if t:
 
                lncount += 1
 
            dummyoutfile.write(line)
 

	
 
        fl = self.linenostart
 
        mw = len(str(lncount + fl - 1))
 
        sp = self.linenospecial
 
        st = self.linenostep
 
        la = self.lineanchors
 
        aln = self.anchorlinenos
 
        nocls = self.noclasses
 
        if sp:
 
            lines = []
 

	
 
            for i in range(fl, fl + lncount):
 
                if i % st == 0:
 
                    if i % sp == 0:
 
                        if aln:
 
                            lines.append('<a href="#%s%d" class="special">%*d</a>' %
 
                                         (la, i, mw, i))
 
                        else:
 
                            lines.append('<span class="special">%*d</span>' % (mw, i))
 
                    else:
 
                        if aln:
 
                            lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
 
                        else:
 
                            lines.append('%*d' % (mw, i))
 
                else:
 
                    lines.append('')
 
            ls = '\n'.join(lines)
 
        else:
 
            lines = []
 
            for i in range(fl, fl + lncount):
 
                if i % st == 0:
 
                    if aln:
 
                        lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
 
                    else:
 
                        lines.append('%*d' % (mw, i))
 
                else:
 
                    lines.append('')
 
            ls = '\n'.join(lines)
 

	
 
        # in case you wonder about the seemingly redundant <div> here: since the
 
        # content in the other cell also is wrapped in a div, some browsers in
 
        # some configurations seem to mess up the formatting...
 
        if nocls:
 
            yield 0, ('<table class="%stable">' % self.cssclass +
 
                      '<tr><td><div class="linenodiv" '
 
                      'style="background-color: #f0f0f0; padding-right: 10px">'
 
                      '<pre style="line-height: 125%">' +
 
                      ls + '</pre></div></td><td id="hlcode" class="code">')
 
        else:
 
            yield 0, ('<table class="%stable">' % self.cssclass +
 
                      '<tr><td class="linenos"><div class="linenodiv"><pre>' +
kallithea/model/comment.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.comment
 
~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
comments model for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Nov 11, 2011
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 

	
 
from pylons.i18n.translation import _
 
from sqlalchemy.util.compat import defaultdict
 

	
 
from kallithea.lib.utils2 import extract_mentioned_users, safe_unicode
 
from kallithea.lib import helpers as h
 
from kallithea.model import BaseModel
 
from kallithea.model.db import ChangesetComment, User, \
 
    Notification, PullRequest
 
from kallithea.model.notification import NotificationModel
 
from kallithea.model.meta import Session
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class ChangesetCommentsModel(BaseModel):
 

	
 
    cls = ChangesetComment
 

	
 
    def __get_changeset_comment(self, changeset_comment):
 
        return self._get_instance(ChangesetComment, changeset_comment)
 

	
 
    def __get_pull_request(self, pull_request):
 
        return self._get_instance(PullRequest, pull_request)
 

	
 
    def _extract_mentions(self, s):
 
        user_objects = []
 
        for username in extract_mentioned_users(s):
 
            user_obj = User.get_by_username(username, case_insensitive=True)
 
            if user_obj:
 
                user_objects.append(user_obj)
 
        return user_objects
 

	
 
    def _get_notification_data(self, repo, comment, user, comment_text,
 
                               line_no=None, revision=None, pull_request=None,
 
                               status_change=None, closing_pr=False):
 
        """
 
        Get notification data
 

	
 
        :param comment_text:
 
        :param line:
 
        :returns: tuple (subj,body,recipients,notification_type,email_kwargs)
 
        """
 
        # make notification
 
        body = comment_text  # text of the comment
 
        line = ''
 
        if line_no:
 
            line = _('on line %s') % line_no
 

	
 
        #changeset
 
        if revision:
 
            notification_type = Notification.TYPE_CHANGESET_COMMENT
 
            cs = repo.scm_instance.get_changeset(revision)
 
            desc = "%s" % (cs.short_id)
 

	
 
            revision_url = h.url('changeset_home',
 
                repo_name=repo.repo_name,
 
                revision=revision,
 
                qualified=True,)
 
            revision_url = h.canonical_url('changeset_home',
 
                repo_name=repo.repo_name,
 
                revision=revision)
 
            comment_url = h.url('changeset_home',
 
                repo_name=repo.repo_name,
 
                revision=revision,
 
                anchor='comment-%s' % comment.comment_id,
 
                qualified=True,
 
            )
 
            comment_url = h.canonical_url('changeset_home',
 
                repo_name=repo.repo_name,
 
                revision=revision,
 
                anchor='comment-%s' % comment.comment_id)
 
            subj = safe_unicode(
 
                h.link_to('Re changeset: %(desc)s %(line)s' % \
 
                          {'desc': desc, 'line': line},
 
                          comment_url)
 
            )
 
            # get the current participants of this changeset
 
            recipients = ChangesetComment.get_users(revision=revision)
 
            # add changeset author if it's in kallithea system
 
            cs_author = User.get_from_cs_author(cs.author)
 
            if not cs_author:
 
                #use repo owner if we cannot extract the author correctly
 
                cs_author = repo.user
 
            recipients += [cs_author]
 
            email_kwargs = {
 
                'status_change': status_change,
 
                'cs_comment_user': h.person(user),
 
                'cs_target_repo': h.url('summary_home', repo_name=repo.repo_name,
 
                                        qualified=True),
 
                'cs_target_repo': h.canonical_url('summary_home', repo_name=repo.repo_name),
 
                'cs_comment_url': comment_url,
 
                'raw_id': revision,
 
                'message': cs.message,
 
                'repo_name': repo.repo_name,
 
                'short_id': h.short_id(revision),
 
                'branch': cs.branch,
 
                'comment_username': user.username,
 
                'threading': [revision_url, comment_url], # TODO: url to line number
 
            }
 
        #pull request
 
        elif pull_request:
 
            notification_type = Notification.TYPE_PULL_REQUEST_COMMENT
 
            desc = comment.pull_request.title
 
            _org_ref_type, org_ref_name, _org_rev = comment.pull_request.org_ref.split(':')
 
            pr_url = h.url('pullrequest_show',
 
                repo_name=pull_request.other_repo.repo_name,
 
                pull_request_id=pull_request.pull_request_id,
 
                qualified=True,)
 
            pr_url = h.canonical_url('pullrequest_show',
 
                repo_name=pull_request.other_repo.repo_name,
 
                pull_request_id=pull_request.pull_request_id)
 
            comment_url = h.url('pullrequest_show',
 
                repo_name=pull_request.other_repo.repo_name,
 
                pull_request_id=pull_request.pull_request_id,
 
                anchor='comment-%s' % comment.comment_id,
 
                qualified=True,
 
            )
 
            comment_url = h.canonical_url('pullrequest_show',
 
                repo_name=pull_request.other_repo.repo_name,
 
                pull_request_id=pull_request.pull_request_id,
 
                anchor='comment-%s' % comment.comment_id)
 
            subj = safe_unicode(
 
                h.link_to('Re pull request #%(pr_id)s: %(desc)s %(line)s' % \
 
                          {'desc': desc,
 
                           'pr_id': comment.pull_request.pull_request_id,
 
                           'line': line},
 
                          comment_url)
 
            )
 
            # get the current participants of this pull request
 
            recipients = ChangesetComment.get_users(pull_request_id=
 
                                                pull_request.pull_request_id)
 
            # add pull request author
 
            recipients += [pull_request.author]
 

	
 
            # add the reviewers to notification
 
            recipients += [x.user for x in pull_request.reviewers]
 

	
 
            #set some variables for email notification
 
            email_kwargs = {
 
                'pr_title': pull_request.title,
 
                'pr_id': pull_request.pull_request_id,
 
                'status_change': status_change,
 
                'closing_pr': closing_pr,
 
                'pr_comment_url': comment_url,
 
                'pr_comment_user': h.person(user),
 
                'pr_target_repo': h.url('summary_home',
 
                                   repo_name=pull_request.other_repo.repo_name,
 
                                   qualified=True),
 
                'pr_target_repo': h.canonical_url('summary_home',
 
                                   repo_name=pull_request.other_repo.repo_name),
 
                'repo_name': pull_request.other_repo.repo_name,
 
                'ref': org_ref_name,
 
                'comment_username': user.username,
 
                'threading': [pr_url, comment_url], # TODO: url to line number
 
            }
 

	
 
        return subj, body, recipients, notification_type, email_kwargs
 

	
 
    def create(self, text, repo, user, revision=None, pull_request=None,
 
               f_path=None, line_no=None, status_change=None, closing_pr=False,
 
               send_email=True):
 
        """
 
        Creates a new comment for a changeset or pull request.
 
        If status_change is not None, this comment is associated with a
 
        status change of the changeset, or of the changesets associated
 
        with the pull request.
 

	
 
        :param text:
 
        :param repo:
 
        :param user:
 
        :param revision:
 
        :param pull_request: (for emails, not for comments)
 
        :param f_path:
 
        :param line_no:
 
        :param status_change: (for emails, not for comments)
 
        :param closing_pr: (for emails, not for comments)
 
        :param send_email: also send email
 
        """
 
        if not text:
 
            log.warning('Missing text for comment, skipping...')
 
            return
 

	
 
        repo = self._get_repo(repo)
 
        user = self._get_user(user)
 
        comment = ChangesetComment()
 
        comment.repo = repo
 
        comment.author = user
 
        comment.text = text
 
        comment.f_path = f_path
 
        comment.line_no = line_no
 

	
 
        if revision:
 
            comment.revision = revision
 
        elif pull_request:
 
            pull_request = self.__get_pull_request(pull_request)
 
            comment.pull_request = pull_request
 
        else:
 
            raise Exception('Please specify revision or pull_request_id')
 

	
 
        Session().add(comment)
 
        Session().flush()
 

	
 
        if send_email:
 
            (subj, body, recipients, notification_type,
 
             email_kwargs) = self._get_notification_data(
 
                                repo, comment, user,
 
                                comment_text=text,
 
                                line_no=line_no,
 
                                revision=revision,
 
                                pull_request=pull_request,
 
                                status_change=status_change,
 
                                closing_pr=closing_pr)
 
            email_kwargs['is_mention'] = False
 
            # create notification objects, and emails
 
            NotificationModel().create(
 
                created_by=user, subject=subj, body=body,
 
                recipients=recipients, type_=notification_type,
 
                email_kwargs=email_kwargs,
 
            )
 

	
 
            mention_recipients = set(self._extract_mentions(body))\
 
                                    .difference(recipients)
 
            if mention_recipients:
 
                email_kwargs['is_mention'] = True
 
                subj = _('[Mention]') + ' ' + subj
 
                NotificationModel().create(
 
                    created_by=user, subject=subj, body=body,
 
                    recipients=mention_recipients,
 
                    type_=notification_type,
 
                    email_kwargs=email_kwargs
 
                )
 

	
 
        return comment
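For reference, a call into this method from elsewhere in the codebase (the auto status change comment in kallithea/model/pull_request.py later in this changeset uses the pull_request variant) looks roughly like the sketch below; the repository, user and revision values are placeholders, not taken from this changeset.

    # Illustrative call only -- all argument values are placeholders.
    from kallithea.model.comment import ChangesetCommentsModel
    from kallithea.model.meta import Session

    comment = ChangesetCommentsModel().create(
        text=u'Looks good to me',
        repo='some/repo',              # repo name, id or Repository instance
        user='reviewer',               # username, id or User instance
        revision='abcdef0123456789',   # a changeset revision ...
        # pull_request=42,             # ... or a pull request instead
        send_email=True,
    )
    Session().commit()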
 

	
 
    def delete(self, comment):
 
        """
 
        Deletes given comment
 

	
 
        :param comment:
 
        """
 
        comment = self.__get_changeset_comment(comment)
 
        Session().delete(comment)
 

	
 
        return comment
 

	
 
    def get_comments(self, repo_id, revision=None, pull_request=None):
 
        """
 
        Gets main comments based on revision or pull_request_id
 

	
 
        :param repo_id:
 
        :param revision:
 
        :param pull_request:
 
        """
 

	
 
        q = ChangesetComment.query()\
 
                .filter(ChangesetComment.repo_id == repo_id)\
 
                .filter(ChangesetComment.line_no == None)\
 
                .filter(ChangesetComment.f_path == None)
 
        if revision:
 
            q = q.filter(ChangesetComment.revision == revision)
 
        elif pull_request:
 
            pull_request = self.__get_pull_request(pull_request)
 
            q = q.filter(ChangesetComment.pull_request == pull_request)
 
        else:
 
            raise Exception('Please specify revision or pull_request')
 
        q = q.order_by(ChangesetComment.created_on)
 
        return q.all()
 

	
 
    def get_inline_comments(self, repo_id, revision=None, pull_request=None):
 
        q = Session().query(ChangesetComment)\
 
            .filter(ChangesetComment.repo_id == repo_id)\
 
            .filter(ChangesetComment.line_no != None)\
 
            .filter(ChangesetComment.f_path != None)\
 
            .order_by(ChangesetComment.comment_id.asc())\
 

	
 
        if revision:
 
            q = q.filter(ChangesetComment.revision == revision)
 
        elif pull_request:
 
            pull_request = self.__get_pull_request(pull_request)
 
            q = q.filter(ChangesetComment.pull_request == pull_request)
 
        else:
 
            raise Exception('Please specify revision or pull_request_id')
 

	
 
        comments = q.all()
 

	
 
        paths = defaultdict(lambda: defaultdict(list))
 

	
 
        for co in comments:
 
            paths[co.f_path][co.line_no].append(co)
 
        return paths.items()
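The h.canonical_url helper that replaces h.url(..., qualified=True) throughout this changeset is defined in kallithea/lib/helpers.py and is not part of the hunks shown here. A minimal sketch of the idea, assuming a setting such as canonical_url = https://kallithea.example.com in the .ini (the names and parsing below are illustrative, not the actual implementation):

    # Sketch only -- the real helper lives in kallithea/lib/helpers.py.
    from pylons import url

    def canonical_url(*args, **kargs):
        import kallithea
        try:
            # rewrite protocol and host from the configured canonical_url
            protocol, rest = kallithea.CONFIG.get('canonical_url', '').split('://', 1)
            kargs['host'] = rest.split('/', 1)[0]
            kargs['protocol'] = protocol
        except ValueError:
            # no canonical_url configured: fall back to the request's own host
            kargs['qualified'] = True
        return url(*args, **kargs)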
kallithea/model/db.py
Show inline comments
 
@@ -1059,386 +1059,386 @@ class Repository(Base, BaseModel):
 
        q = q.options(joinedload(Repository.fork))\
 
                .options(joinedload(Repository.user))\
 
                .options(joinedload(Repository.group))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_full_path(cls, repo_full_path):
 
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
 
        repo_name = cls.normalize_repo_name(repo_name)
 
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
 

	
 
    @classmethod
 
    def get_repo_forks(cls, repo_id):
 
        return cls.query().filter(Repository.fork_id == repo_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns the base path where all repos are stored
 

	
 
        :param cls:
 
        """
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == cls.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def forks(self):
 
        """
 
        Return forks of this repo
 
        """
 
        return Repository.get_repo_forks(self.repo_id)
 

	
 
    @property
 
    def parent(self):
 
        """
 
        Returns fork parent
 
        """
 
        return self.fork
 

	
 
    @property
 
    def just_name(self):
 
        return self.repo_name.split(Repository.url_sep())[-1]
 

	
 
    @property
 
    def groups_with_parents(self):
 
        groups = []
 
        if self.group is None:
 
            return groups
 

	
 
        cur_gr = self.group
 
        groups.insert(0, cur_gr)
 
        while 1:
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            groups.insert(0, gr)
 

	
 
        return groups
 

	
 
    @property
 
    def groups_and_repo(self):
 
        return self.groups_with_parents, self.just_name, self.repo_name
 

	
 
    @LazyProperty
 
    def repo_path(self):
 
        """
 
        Returns the full base path for this repository, i.e. where it
 
        actually exists on the filesystem
 
        """
 
        q = Session().query(Ui).filter(Ui.ui_key ==
 
                                              Repository.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def repo_full_path(self):
 
        p = [self.repo_path]
 
        # we need to split the name by / since this is how we store the
 
        # names in the database, but that eventually needs to be converted
 
        # into a valid system path
 
        p += self.repo_name.split(Repository.url_sep())
 
        return os.path.join(*map(safe_unicode, p))
 

	
 
    @property
 
    def cache_keys(self):
 
        """
 
        Returns associated cache keys for that repo
 
        """
 
        return CacheInvalidation.query()\
 
            .filter(CacheInvalidation.cache_args == self.repo_name)\
 
            .order_by(CacheInvalidation.cache_key)\
 
            .all()
 

	
 
    def get_new_name(self, repo_name):
 
        """
 
        returns the new full repository name based on the assigned group and new repo name
 

	
 
        :param repo_name:
 
        """
 
        path_prefix = self.group.full_path_splitted if self.group else []
 
        return Repository.url_sep().join(path_prefix + [repo_name])
 

	
 
    @property
 
    def _ui(self):
 
        """
 
        Creates a db-based ui object for this repository
 
        """
 
        from kallithea.lib.utils import make_ui
 
        return make_ui('db', clear_session=False)
 

	
 
    @classmethod
 
    def is_valid(cls, repo_name):
 
        """
 
        returns True if given repo name is a valid filesystem repository
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        from kallithea.lib.utils import is_valid_repo
 

	
 
        return is_valid_repo(repo_name, cls.base_path())
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating repo api data
 

	
 
        """
 
        repo = self
 
        data = dict(
 
            repo_id=repo.repo_id,
 
            repo_name=repo.repo_name,
 
            repo_type=repo.repo_type,
 
            clone_uri=repo.clone_uri,
 
            private=repo.private,
 
            created_on=repo.created_on,
 
            description=repo.description,
 
            landing_rev=repo.landing_rev,
 
            owner=repo.user.username,
 
            fork_of=repo.fork.repo_name if repo.fork else None,
 
            enable_statistics=repo.enable_statistics,
 
            enable_locking=repo.enable_locking,
 
            enable_downloads=repo.enable_downloads,
 
            last_changeset=repo.changeset_cache,
 
            locked_by=User.get(self.locked[0]).get_api_data() \
 
                if self.locked[0] else None,
 
            locked_date=time_to_datetime(self.locked[1]) \
 
                if self.locked[1] else None
 
        )
 
        rc_config = Setting.get_app_settings()
 
        repository_fields = str2bool(rc_config.get('repository_fields'))
 
        if repository_fields:
 
            for f in self.extra_fields:
 
                data[f.field_key_prefixed] = f.field_value
 

	
 
        return data
 

	
 
    @classmethod
 
    def lock(cls, repo, user_id, lock_time=None):
 
        if not lock_time:
 
            lock_time = time.time()
 
        repo.locked = [user_id, lock_time]
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def unlock(cls, repo):
 
        repo.locked = None
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def getlock(cls, repo):
 
        return repo.locked
 

	
 
    @property
 
    def last_db_change(self):
 
        return self.updated_on
 

	
 
    @property
 
    def clone_uri_hidden(self):
 
        clone_uri = self.clone_uri
 
        if clone_uri:
 
            import urlobject
 
            url_obj = urlobject.URLObject(self.clone_uri)
 
            if url_obj.password:
 
                clone_uri = url_obj.with_password('*****')
 
        return clone_uri
 

	
 
    def clone_url(self, **override):
 
        from pylons import url
 
        qualified_home_url = url('home', qualified=True)
 
        import kallithea.lib.helpers as h
 
        qualified_home_url = h.canonical_url('home')
 

	
 
        uri_tmpl = None
 
        if 'with_id' in override:
 
            uri_tmpl = self.DEFAULT_CLONE_URI_ID
 
            del override['with_id']
 

	
 
        if 'uri_tmpl' in override:
 
            uri_tmpl = override['uri_tmpl']
 
            del override['uri_tmpl']
 

	
 
        # we didn't override our tmpl from **overrides
 
        if not uri_tmpl:
 
            uri_tmpl = self.DEFAULT_CLONE_URI
 
            try:
 
                from pylons import tmpl_context as c
 
                uri_tmpl = c.clone_uri_tmpl
 
            except Exception:
 
                # in any case if we call this outside of request context,
 
                # ie, not having tmpl_context set up
 
                pass
 

	
 
        return get_clone_url(uri_tmpl=uri_tmpl,
 
                             qualifed_home_url=qualified_home_url,
 
                             repo_name=self.repo_name,
 
                             repo_id=self.repo_id, **override)
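The actual expansion of the template is delegated to get_clone_url() (kallithea.lib.utils2, not shown in this changeset); with the change above, the base URL it expands against now comes from h.canonical_url('home'). Typical call sites, with a placeholder repository name, would be:

    # Illustrative only -- the resulting URL depends on the configured templates
    # and on the canonical_url setting via h.canonical_url('home').
    repo = Repository.get_by_repo_name('some/repo')   # placeholder name
    default_url = repo.clone_url()                     # DEFAULT_CLONE_URI or c.clone_uri_tmpl
    by_id_url = repo.clone_url(with_id=True)           # DEFAULT_CLONE_URI_ID template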
 

	
 
    def set_state(self, state):
 
        self.repo_state = state
 
        Session().add(self)
 
    #==========================================================================
 
    # SCM PROPERTIES
 
    #==========================================================================
 

	
 
    def get_changeset(self, rev=None):
 
        return get_changeset_safe(self.scm_instance, rev)
 

	
 
    def get_landing_changeset(self):
 
        """
 
        Returns landing changeset, or if that doesn't exist returns the tip
 
        """
 
        _rev_type, _rev = self.landing_rev
 
        cs = self.get_changeset(_rev)
 
        if isinstance(cs, EmptyChangeset):
 
            return self.get_changeset()
 
        return cs
 

	
 
    def update_changeset_cache(self, cs_cache=None):
 
        """
 
        Update cache of last changeset for repository, keys should be::
 

	
 
            short_id
 
            raw_id
 
            revision
 
            message
 
            date
 
            author
 

	
 
        :param cs_cache:
 
        """
 
        from kallithea.lib.vcs.backends.base import BaseChangeset
 
        if cs_cache is None:
 
            cs_cache = EmptyChangeset()
 
            # use no-cache version here
 
            scm_repo = self.scm_instance_no_cache()
 
            if scm_repo:
 
                cs_cache = scm_repo.get_changeset()
 

	
 
        if isinstance(cs_cache, BaseChangeset):
 
            cs_cache = cs_cache.__json__()
 

	
 
        if (not self.changeset_cache or cs_cache['raw_id'] != self.changeset_cache['raw_id']):
 
            _default = datetime.datetime.fromtimestamp(0)
 
            last_change = cs_cache.get('date') or _default
 
            log.debug('updated repo %s with new cs cache %s'
 
                      % (self.repo_name, cs_cache))
 
            self.updated_on = last_change
 
            self.changeset_cache = cs_cache
 
            Session().add(self)
 
            Session().commit()
 
        else:
 
            log.debug('changeset_cache for %s already up to date with %s'
 
                      % (self.repo_name, cs_cache['raw_id']))
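The value stored in changeset_cache is a plain dict with the keys listed in the docstring above; its shape is roughly as follows (placeholder data, shown only to illustrate the keys):

    # Placeholder data, not taken from any real repository.
    import datetime
    cs_cache = {
        'short_id': 'abcdef012345',
        'raw_id': 'abcdef0123456789abcdef0123456789abcdef01',
        'revision': 42,
        'message': 'example commit message',
        'date': datetime.datetime(2014, 1, 1, 12, 0, 0),
        'author': 'Example Author <author@example.com>',
    }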
 

	
 
    @property
 
    def tip(self):
 
        return self.get_changeset('tip')
 

	
 
    @property
 
    def author(self):
 
        return self.tip.author
 

	
 
    @property
 
    def last_change(self):
 
        return self.scm_instance.last_change
 

	
 
    def get_comments(self, revisions=None):
 
        """
 
        Returns comments for this repository grouped by revisions
 

	
 
        :param revisions: filter query by revisions only
 
        """
 
        cmts = ChangesetComment.query()\
 
            .filter(ChangesetComment.repo == self)
 
        if revisions:
 
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
 
        grouped = collections.defaultdict(list)
 
        for cmt in cmts.all():
 
            grouped[cmt.revision].append(cmt)
 
        return grouped
 

	
 
    def statuses(self, revisions):
 
        """
 
        Returns statuses for this repository
 

	
 
        :param revisions: list of revisions to get statuses for
 
        """
 
        if not revisions:
 
            return {}
 

	
 
        statuses = ChangesetStatus.query()\
 
            .filter(ChangesetStatus.repo == self)\
 
            .filter(ChangesetStatus.version == 0)\
 
            .filter(ChangesetStatus.revision.in_(revisions))
 
        grouped = {}
 

	
 
        stat = ChangesetStatus.DEFAULT
 
        status_lbl = ChangesetStatus.get_status_lbl(stat)
 
        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
 
            for rev in pr.revisions:
 
                pr_id = pr.pull_request_id
 
                pr_repo = pr.other_repo.repo_name
 
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
 

	
 
        for stat in statuses.all():
 
            pr_id = pr_repo = None
 
            if stat.pull_request:
 
                pr_id = stat.pull_request.pull_request_id
 
                pr_repo = stat.pull_request.other_repo.repo_name
 
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
 
                                      pr_id, pr_repo]
 
        return grouped
 

	
 
    def _repo_size(self):
 
        from kallithea.lib import helpers as h
 
        log.debug('calculating repository size...')
 
        return h.format_byte_size(self.scm_instance.size)
 

	
 
    #==========================================================================
 
    # SCM CACHE INSTANCE
 
    #==========================================================================
 

	
 
    def set_invalidate(self):
 
        """
 
        Mark caches of this repo as invalid.
 
        """
 
        CacheInvalidation.set_invalidate(self.repo_name)
 

	
 
    def scm_instance_no_cache(self):
 
        return self.__get_instance()
 

	
 
    @property
 
    def scm_instance(self):
 
        import kallithea
 
        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
 
        if full_cache:
 
            return self.scm_instance_cached()
 
        return self.__get_instance()
 

	
 
    def scm_instance_cached(self, valid_cache_keys=None):
 
        @cache_region('long_term')
 
        def _c(repo_name):
 
            return self.__get_instance()
 
        rn = self.repo_name
 

	
 
        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
 
        if not valid:
 
            log.debug('Cache for %s invalidated, getting new object' % (rn))
 
            region_invalidate(_c, None, rn)
 
        else:
 
            log.debug('Getting scm_instance of %s from cache' % (rn))
 
        return _c(rn)
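scm_instance_cached combines a beaker cache_region with the CacheInvalidation bookkeeping; the underlying decorate / invalidate / call pattern in isolation looks like this (a minimal sketch that assumes a 'long_term' cache region is configured elsewhere):

    # Minimal beaker sketch; region configuration is assumed to exist.
    from beaker.cache import cache_region, region_invalidate

    def expensive_lookup(key):
        return 'computed value for %s' % key   # stand-in for the real work

    @cache_region('long_term')
    def _cached_lookup(key):
        return expensive_lookup(key)

    def lookup(key, still_valid=True):
        if not still_valid:
            # drop the cached entry so the next call recomputes it
            region_invalidate(_cached_lookup, None, key)
        return _cached_lookup(key)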
 

	
 
    def __get_instance(self):
 
        repo_full_path = self.repo_full_path
 
        try:
 
            alias = get_scm(repo_full_path)[0]
 
            log.debug('Creating instance of %s repository from %s'
 
                      % (alias, repo_full_path))
 
            backend = get_backend(alias)
 
        except VCSError:
 
            log.error(traceback.format_exc())
 
            log.error('Perhaps this repository is in db and not in '
 
@@ -2258,248 +2258,248 @@ class PullRequest(Base, BaseModel):
 

	
 
    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
 
    title = Column('title', Unicode(256), nullable=True)
 
    description = Column('description', UnicodeText(10240), nullable=True)
 
    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW) # only for closedness, not approve/reject/etc
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
 
    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
 
    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    org_ref = Column('org_ref', Unicode(256), nullable=False)
 
    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    other_ref = Column('other_ref', Unicode(256), nullable=False)
 

	
 
    @hybrid_property
 
    def revisions(self):
 
        return self._revisions.split(':')
 

	
 
    @revisions.setter
 
    def revisions(self, val):
 
        self._revisions = ':'.join(val)
 

	
 
    @property
 
    def org_ref_parts(self):
 
        return self.org_ref.split(':')
 

	
 
    @property
 
    def other_ref_parts(self):
 
        return self.other_ref.split(':')
 

	
 
    author = relationship('User', lazy='joined')
 
    reviewers = relationship('PullRequestReviewers',
 
                             cascade="all, delete, delete-orphan")
 
    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
 
    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
 
    statuses = relationship('ChangesetStatus')
 
    comments = relationship('ChangesetComment',
 
                             cascade="all, delete, delete-orphan")
 

	
 
    def is_closed(self):
 
        return self.status == self.STATUS_CLOSED
 

	
 
    @property
 
    def last_review_status(self):
 
        return str(self.statuses[-1].status) if self.statuses else ''
 

	
 
    def __json__(self):
 
        return dict(
 
            revisions=self.revisions
 
        )
 

	
 

	
 
class PullRequestReviewers(Base, BaseModel):
 
    __tablename__ = 'pull_request_reviewers'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    def __init__(self, user=None, pull_request=None):
 
        self.user = user
 
        self.pull_request = pull_request
 

	
 
    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
 
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
 

	
 
    user = relationship('User')
 
    pull_request = relationship('PullRequest')
 

	
 

	
 
class Notification(Base, BaseModel):
 
    __tablename__ = 'notifications'
 
    __table_args__ = (
 
        Index('notification_type_idx', 'type'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    TYPE_CHANGESET_COMMENT = u'cs_comment'
 
    TYPE_MESSAGE = u'message'
 
    TYPE_MENTION = u'mention'
 
    TYPE_REGISTRATION = u'registration'
 
    TYPE_PULL_REQUEST = u'pull_request'
 
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
 

	
 
    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
 
    subject = Column('subject', Unicode(512), nullable=True)
 
    body = Column('body', UnicodeText(50000), nullable=True)
 
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    type_ = Column('type', Unicode(256))
 

	
 
    created_by_user = relationship('User')
 
    notifications_to_users = relationship('UserNotification', lazy='joined',
 
                                          cascade="all, delete, delete-orphan")
 

	
 
    @property
 
    def recipients(self):
 
        return [x.user for x in UserNotification.query()\
 
                .filter(UserNotification.notification == self)\
 
                .order_by(UserNotification.user_id.asc()).all()]
 

	
 
    @classmethod
 
    def create(cls, created_by, subject, body, recipients, type_=None):
 
        if type_ is None:
 
            type_ = Notification.TYPE_MESSAGE
 

	
 
        notification = cls()
 
        notification.created_by_user = created_by
 
        notification.subject = subject
 
        notification.body = body
 
        notification.type_ = type_
 
        notification.created_on = datetime.datetime.now()
 

	
 
        for u in recipients:
 
            assoc = UserNotification()
 
            assoc.notification = notification
 
            u.notifications.append(assoc)
 
        Session().add(notification)
 
        return notification
 

	
 
    @property
 
    def description(self):
 
        from kallithea.model.notification import NotificationModel
 
        return NotificationModel().make_description(self)
 

	
 

	
 
class UserNotification(Base, BaseModel):
 
    __tablename__ = 'user_to_notification'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'notification_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
 
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
 
    read = Column('read', Boolean, default=False)
 
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
 

	
 
    user = relationship('User', lazy="joined")
 
    notification = relationship('Notification', lazy="joined",
 
                                order_by=lambda: Notification.created_on.desc(),)
 

	
 
    def mark_as_read(self):
 
        self.read = True
 
        Session().add(self)
 

	
 

	
 
class Gist(Base, BaseModel):
 
    __tablename__ = 'gists'
 
    __table_args__ = (
 
        Index('g_gist_access_id_idx', 'gist_access_id'),
 
        Index('g_created_on_idx', 'created_on'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    GIST_PUBLIC = u'public'
 
    GIST_PRIVATE = u'private'
 
    DEFAULT_FILENAME = u'gistfile1.txt'
 

	
 
    gist_id = Column('gist_id', Integer(), primary_key=True)
 
    gist_access_id = Column('gist_access_id', Unicode(250))
 
    gist_description = Column('gist_description', UnicodeText(1024))
 
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
 
    gist_expires = Column('gist_expires', Float(53), nullable=False)
 
    gist_type = Column('gist_type', Unicode(128), nullable=False)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    owner = relationship('User')
 

	
 
    def __repr__(self):
 
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
 

	
 
    @classmethod
 
    def get_or_404(cls, id_):
 
        res = cls.query().filter(cls.gist_access_id == id_).scalar()
 
        if not res:
 
            raise HTTPNotFound
 
        return res
 

	
 
    @classmethod
 
    def get_by_access_id(cls, gist_access_id):
 
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
 

	
 
    def gist_url(self):
 
        import kallithea
 
        alias_url = kallithea.CONFIG.get('gist_alias_url')
 
        if alias_url:
 
            return alias_url.replace('{gistid}', self.gist_access_id)
 

	
 
        from pylons import url
 
        return url('gist', gist_id=self.gist_access_id, qualified=True)
 
        import kallithea.lib.helpers as h
 
        return h.canonical_url('gist', gist_id=self.gist_access_id)
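gist_url prefers the gist_alias_url setting when it is present and only falls back to the canonical 'gist' route otherwise; for example, with a hypothetical setting value:

    # With e.g.  gist_alias_url = https://gist.example.com/{gistid}  configured:
    alias_url = 'https://gist.example.com/{gistid}'
    alias_url.replace('{gistid}', '7')   # -> 'https://gist.example.com/7'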
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns the base path where all gists are stored
 

	
 
        :param cls:
 
        """
 
        from kallithea.model.gist import GIST_STORE_LOC
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == URL_SEP)
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating gist related data for API
 
        """
 
        gist = self
 
        data = dict(
 
            gist_id=gist.gist_id,
 
            type=gist.gist_type,
 
            access_id=gist.gist_access_id,
 
            description=gist.gist_description,
 
            url=gist.gist_url(),
 
            expires=gist.gist_expires,
 
            created_on=gist.created_on,
 
        )
 
        return data
 

	
 
    def __json__(self):
 
        data = dict(
 
        )
 
        data.update(self.get_api_data())
 
        return data
 
    ## SCM functions
 

	
 
    @property
 
    def scm_instance(self):
 
        from kallithea.lib.vcs import get_repo
 
        base_path = self.base_path()
 
        return get_repo(os.path.join(*map(safe_str,
 
                                          [base_path, self.gist_access_id])))
 

	
 

	
 
class DbMigrateVersion(Base, BaseModel):
 
    __tablename__ = 'db_migrate_version'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    repository_id = Column('repository_id', String(250), primary_key=True)
 
    repository_path = Column('repository_path', Text)
 
    version = Column('version', Integer)
kallithea/model/pull_request.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.pull_request
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
pull request model for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jun 6, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 
import datetime
 

	
 
from pylons.i18n.translation import _
 

	
 
from kallithea.model.meta import Session
 
from kallithea.lib import helpers as h
 
from kallithea.model import BaseModel
 
from kallithea.model.db import PullRequest, PullRequestReviewers, Notification,\
 
    ChangesetStatus, User
 
from kallithea.model.notification import NotificationModel
 
from kallithea.lib.utils2 import extract_mentioned_users, safe_unicode
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class PullRequestModel(BaseModel):
 

	
 
    cls = PullRequest
 

	
 
    def __get_pull_request(self, pull_request):
 
        return self._get_instance(PullRequest, pull_request)
 

	
 
    def get_pullrequest_cnt_for_user(self, user):
 
        return PullRequest.query()\
 
                                .join(PullRequestReviewers)\
 
                                .filter(PullRequestReviewers.user_id == user)\
 
                                .filter(PullRequest.status != PullRequest.STATUS_CLOSED)\
 
                                .count()
 

	
 
    def get_all(self, repo_name, from_=False, closed=False):
 
        """Get all PRs for repo.
 
        Default is all PRs to the repo, PRs from the repo if from_.
 
        Closed PRs are only included if closed is true."""
 
        repo = self._get_repo(repo_name)
 
        q = PullRequest.query()
 
        if from_:
 
            q = q.filter(PullRequest.org_repo == repo)
 
        else:
 
            q = q.filter(PullRequest.other_repo == repo)
 
        if not closed:
 
            q = q.filter(PullRequest.status != PullRequest.STATUS_CLOSED)
 
        return q.order_by(PullRequest.created_on.desc()).all()
 

	
 
    def create(self, created_by, org_repo, org_ref, other_repo, other_ref,
 
               revisions, reviewers, title, description=None):
 
        from kallithea.model.changeset_status import ChangesetStatusModel
 

	
 
        created_by_user = self._get_user(created_by)
 
        org_repo = self._get_repo(org_repo)
 
        other_repo = self._get_repo(other_repo)
 

	
 
        new = PullRequest()
 
        new.org_repo = org_repo
 
        new.org_ref = org_ref
 
        new.other_repo = other_repo
 
        new.other_ref = other_ref
 
        new.revisions = revisions
 
        new.title = title
 
        new.description = description
 
        new.author = created_by_user
 
        Session().add(new)
 
        Session().flush()
 

	
 
        #reset state to under-review
 
        from kallithea.model.comment import ChangesetCommentsModel
 
        comment = ChangesetCommentsModel().create(
 
            text=u'Auto status change to %s' % (ChangesetStatus.get_status_lbl(ChangesetStatus.STATUS_UNDER_REVIEW)),
 
            repo=org_repo,
 
            user=new.author,
 
            pull_request=new,
 
            send_email=False
 
        )
 
        ChangesetStatusModel().set_status(
 
            org_repo,
 
            ChangesetStatus.STATUS_UNDER_REVIEW,
 
            new.author,
 
            comment,
 
            pull_request=new
 
        )
 

	
 
        mention_recipients = set(User.get_by_username(username, case_insensitive=True)
 
                                 for username in extract_mentioned_users(new.description))
 
        self.__add_reviewers(new, reviewers, mention_recipients)
 

	
 
        return new
 

	
 
    def __add_reviewers(self, pr, reviewers, mention_recipients=None):
 
        #members
 
        for member in set(reviewers):
 
            _usr = self._get_user(member)
 
            reviewer = PullRequestReviewers(_usr, pr)
 
            Session().add(reviewer)
 

	
 
        revision_data = [(x.raw_id, x.message)
 
                         for x in map(pr.org_repo.get_changeset, pr.revisions)]
 

	
 
        #notification to reviewers
 
        pr_url = h.url('pullrequest_show', repo_name=pr.other_repo.repo_name,
 
                       pull_request_id=pr.pull_request_id,
 
                       qualified=True)
 
        pr_url = h.canonical_url('pullrequest_show', repo_name=pr.other_repo.repo_name,
 
                       pull_request_id=pr.pull_request_id)
 
        subject = safe_unicode(
 
            h.link_to(
 
              _('%(user)s wants you to review pull request #%(pr_id)s: %(pr_title)s') % \
 
                {'user': pr.author.username,
 
                 'pr_title': pr.title,
 
                 'pr_id': pr.pull_request_id},
 
                pr_url)
 
            )
 
        body = pr.description
 
        _org_ref_type, org_ref_name, _org_rev = pr.org_ref.split(':')
 
        email_kwargs = {
 
            'pr_title': pr.title,
 
            'pr_user_created': h.person(pr.author),
 
            'pr_repo_url': h.url('summary_home', repo_name=pr.other_repo.repo_name,
 
                                 qualified=True,),
 
            'pr_repo_url': h.canonical_url('summary_home', repo_name=pr.other_repo.repo_name),
 
            'pr_url': pr_url,
 
            'pr_revisions': revision_data,
 
            'repo_name': pr.other_repo.repo_name,
 
            'pr_id': pr.pull_request_id,
 
            'ref': org_ref_name,
 
            'pr_username': pr.author.username,
 
            'threading': [pr_url],
 
            'is_mention': False,
 
            }
 
        if reviewers:
 
            NotificationModel().create(created_by=pr.author, subject=subject, body=body,
 
                                       recipients=reviewers,
 
                                       type_=Notification.TYPE_PULL_REQUEST,
 
                                       email_kwargs=email_kwargs)
 

	
 
        if mention_recipients:
 
            mention_recipients.discard(None)
 
            mention_recipients.difference_update(reviewers)
 
        if mention_recipients:
 
            email_kwargs['is_mention'] = True
 
            subject = _('[Mention]') + ' ' + subject
 

	
 
            NotificationModel().create(created_by=pr.author, subject=subject, body=body,
 
                                       recipients=mention_recipients,
 
                                       type_=Notification.TYPE_PULL_REQUEST,
 
                                       email_kwargs=email_kwargs)
 

	
 
    def mention_from_description(self, pr, old_description=''):
 
        mention_recipients = set(User.get_by_username(username, case_insensitive=True)
 
                                 for username in extract_mentioned_users(pr.description))
 
        mention_recipients.difference_update(User.get_by_username(username, case_insensitive=True)
 
                                             for username in extract_mentioned_users(old_description))
 

	
 
        log.debug("Mentioning %s" % mention_recipients)
 
        self.__add_reviewers(pr, [], mention_recipients)
 

	
 
    def update_reviewers(self, pull_request, reviewers_ids):
 
        reviewers_ids = set(reviewers_ids)
 
        pull_request = self.__get_pull_request(pull_request)
 
        current_reviewers = PullRequestReviewers.query()\
 
                            .filter(PullRequestReviewers.pull_request==
 
                                   pull_request)\
 
                            .all()
 
        current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
 

	
 
        to_add = reviewers_ids.difference(current_reviewers_ids)
 
        to_remove = current_reviewers_ids.difference(reviewers_ids)
 

	
 
        log.debug("Adding %s reviewers" % to_add)
 
        self.__add_reviewers(pull_request, to_add)
 

	
 
        log.debug("Removing %s reviewers" % to_remove)
 
        for uid in to_remove:
 
            reviewer = PullRequestReviewers.query()\
 
                    .filter(PullRequestReviewers.user_id==uid,
 
                            PullRequestReviewers.pull_request==pull_request)\
 
                    .scalar()
 
            if reviewer:
 
                Session().delete(reviewer)
 

	
 
    def delete(self, pull_request):
 
        pull_request = self.__get_pull_request(pull_request)
 
        Session().delete(pull_request)
 

	
 
    def close_pull_request(self, pull_request):
 
        pull_request = self.__get_pull_request(pull_request)
 
        pull_request.status = PullRequest.STATUS_CLOSED
 
        pull_request.updated_on = datetime.datetime.now()
 
        Session().add(pull_request)
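Putting the pieces together, creating a pull request through this model looks roughly as follows; every value below is a placeholder, and the org_ref/other_ref strings follow the 'type:name:revision' form that __add_reviewers splits on.

    # Illustrative only -- every value is a placeholder.
    from kallithea.model.pull_request import PullRequestModel
    from kallithea.model.meta import Session

    pr = PullRequestModel().create(
        created_by='someuser',
        org_repo='fork/repo',
        org_ref='branch:default:abcdef0123456789',
        other_repo='main/repo',
        other_ref='branch:default:0123456789abcdef',
        revisions=['abcdef0123456789'],
        reviewers=['reviewer1', 'reviewer2'],
        title='Example pull request',
        description='@reviewer1 please have a look',
    )
    Session().commit()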
kallithea/model/user.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.user
 
~~~~~~~~~~~~~~~~~~~~
 

	
 
users model for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 9, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 

	
 
import logging
 
import traceback
 
from pylons import url
 
from pylons.i18n.translation import _
 

	
 
from sqlalchemy.exc import DatabaseError
 

	
 
from kallithea import EXTERN_TYPE_INTERNAL
 
from kallithea.lib.utils2 import safe_unicode, generate_api_key, get_current_authuser
 
from kallithea.lib.caching_query import FromCache
 
from kallithea.model import BaseModel
 
from kallithea.model.db import User, UserToPerm, Notification, \
 
    UserEmailMap, UserIpMap
 
from kallithea.lib.exceptions import DefaultUserException, \
 
    UserOwnsReposException
 
from kallithea.model.meta import Session
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class UserModel(BaseModel):
 
    cls = User
 

	
 
    def get(self, user_id, cache=False):
 
        user = self.sa.query(User)
 
        if cache:
 
            user = user.options(FromCache("sql_cache_short",
 
                                          "get_user_%s" % user_id))
 
        return user.get(user_id)
 

	
 
    def get_user(self, user):
 
        return self._get_user(user)
 

	
 
    def get_by_username(self, username, cache=False, case_insensitive=False):
 

	
 
        if case_insensitive:
 
            user = self.sa.query(User).filter(User.username.ilike(username))
 
        else:
 
            user = self.sa.query(User)\
 
                .filter(User.username == username)
 
        if cache:
 
            user = user.options(FromCache("sql_cache_short",
 
                                          "get_user_%s" % username))
 
        return user.scalar()
 

	
 
    def get_by_email(self, email, cache=False, case_insensitive=False):
 
        return User.get_by_email(email, case_insensitive, cache)
 

	
 
    def get_by_api_key(self, api_key, cache=False):
 
        return User.get_by_api_key(api_key, cache)
 

	
 
    def create(self, form_data, cur_user=None):
 
        if not cur_user:
 
            cur_user = getattr(get_current_authuser(), 'username', None)
 

	
 
        from kallithea.lib.hooks import log_create_user, check_allowed_create_user
 
        _fd = form_data
 
        user_data = {
 
            'username': _fd['username'], 'password': _fd['password'],
 
            'email': _fd['email'], 'firstname': _fd['firstname'], 'lastname': _fd['lastname'],
 
            'active': _fd['active'], 'admin': False
 
        }
 
        # raises UserCreationError if it's not allowed
 
        check_allowed_create_user(user_data, cur_user)
 
        from kallithea.lib.auth import get_crypt_password
 
        try:
 
            new_user = User()
 
            for k, v in form_data.items():
 
                if k == 'password':
 
                    v = get_crypt_password(v)
 
                if k == 'firstname':
 
                    k = 'name'
 
                setattr(new_user, k, v)
 

	
 
            new_user.api_key = generate_api_key(form_data['username'])
 
            self.sa.add(new_user)
 

	
 
            log_create_user(new_user.get_dict(), cur_user)
 
            return new_user
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def create_or_update(self, username, password, email, firstname='',
 
                         lastname='', active=True, admin=False,
 
                         extern_type=None, extern_name=None, cur_user=None):
 
        """
 
        Creates a new instance if not found, or updates current one
 

	
 
        :param username:
 
        :param password:
 
        :param email:
 
        :param active:
 
        :param firstname:
 
        :param lastname:
 
        :param active:
 
        :param admin:
 
        :param extern_name:
 
        :param extern_type:
 
        :param cur_user:
 
        """
 
        if not cur_user:
 
            cur_user = getattr(get_current_authuser(), 'username', None)
 

	
 
        from kallithea.lib.auth import get_crypt_password, check_password
 
        from kallithea.lib.hooks import log_create_user, check_allowed_create_user
 
        user_data = {
 
            'username': username, 'password': password,
 
            'email': email, 'firstname': firstname, 'lastname': lastname,
 
            'active': active, 'admin': admin
 
        }
 
        # raises UserCreationError if it's not allowed
 
        check_allowed_create_user(user_data, cur_user)
 

	
 
        log.debug('Checking for %s account in Kallithea database' % username)
 
        user = User.get_by_username(username, case_insensitive=True)
 
        if user is None:
 
            log.debug('creating new user %s' % username)
 
            new_user = User()
 
            edit = False
 
        else:
 
            log.debug('updating user %s' % username)
 
            new_user = user
 
            edit = True
 

	
 
        try:
 
            new_user.username = username
 
            new_user.admin = admin
 
            new_user.email = email
 
            new_user.active = active
 
            new_user.extern_name = safe_unicode(extern_name) if extern_name else None
 
            new_user.extern_type = safe_unicode(extern_type) if extern_type else None
 
            new_user.name = firstname
 
            new_user.lastname = lastname
 

	
 
            if not edit:
 
                new_user.api_key = generate_api_key(username)
 

	
 
            # set password only if creating a user or the password is changed
 
            password_change = new_user.password and not check_password(password,
 
                                                            new_user.password)
 
            if not edit or password_change:
 
                reason = 'new password' if edit else 'new user'
 
                log.debug('Updating password reason=>%s' % (reason,))
 
                new_user.password = get_crypt_password(password) if password else None
 

	
 
            self.sa.add(new_user)
 

	
 
            if not edit:
 
                log_create_user(new_user.get_dict(), cur_user)
 
            return new_user
 
        except (DatabaseError,):
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def create_registration(self, form_data):
 
        from kallithea.model.notification import NotificationModel
 
        import kallithea.lib.helpers as h
 

	
 
        try:
 
            form_data['admin'] = False
 
            form_data['extern_name'] = EXTERN_TYPE_INTERNAL
 
            form_data['extern_type'] = EXTERN_TYPE_INTERNAL
 
            new_user = self.create(form_data)
 

	
 
            self.sa.add(new_user)
 
            self.sa.flush()
 

	
 
            # notification to admins
 
            subject = _('New user registration')
 
            body = ('New user registration\n'
 
                    '---------------------\n'
 
                    '- Username: %s\n'
 
                    '- Full Name: %s\n'
 
                    '- Email: %s\n')
 
            body = body % (new_user.username, new_user.full_name, new_user.email)
 
            edit_url = url('edit_user', id=new_user.user_id, qualified=True)
 
            edit_url = h.canonical_url('edit_user', id=new_user.user_id)
 
            email_kwargs = {'registered_user_url': edit_url, 'new_username': new_user.username}
 
            NotificationModel().create(created_by=new_user, subject=subject,
 
                                       body=body, recipients=None,
 
                                       type_=Notification.TYPE_REGISTRATION,
 
                                       email_kwargs=email_kwargs)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def update(self, user_id, form_data, skip_attrs=[]):
 
        from kallithea.lib.auth import get_crypt_password
 
        try:
 
            user = self.get(user_id, cache=False)
 
            if user.username == User.DEFAULT_USER:
 
                raise DefaultUserException(
 
                                _("You can't Edit this user since it's "
 
                                  "crucial for entire application"))
 

	
 
            for k, v in form_data.items():
 
                if k in skip_attrs:
 
                    continue
 
                if k == 'new_password' and v:
 
                    user.password = get_crypt_password(v)
 
                else:
 
                    # old legacy thing orm models store firstname as name,
 
                    # need proper refactor to username
 
                    if k == 'firstname':
 
                        k = 'name'
 
                    setattr(user, k, v)
 
            self.sa.add(user)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def update_user(self, user, **kwargs):
 
        from kallithea.lib.auth import get_crypt_password
 
        try:
 
            user = self._get_user(user)
 
            if user.username == User.DEFAULT_USER:
 
                raise DefaultUserException(
 
                    _("You can't Edit this user since it's"
 
                      " crucial for entire application")
 
                )
 

	
 
            for k, v in kwargs.items():
 
                if k == 'password' and v:
 
                    v = get_crypt_password(v)
 

	
 
                setattr(user, k, v)
 
            self.sa.add(user)
 
            return user
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def delete(self, user, cur_user=None):
 
        if not cur_user:
 
            cur_user = getattr(get_current_authuser(), 'username', None)
 
        user = self._get_user(user)
 

	
 
        try:
 
            if user.username == User.DEFAULT_USER:
 
                raise DefaultUserException(
 
                    _(u"You can't remove this user since it's"
 
                      " crucial for entire application")
 
                )
 
            if user.repositories:
 
                repos = [x.repo_name for x in user.repositories]
 
                raise UserOwnsReposException(
 
                    _(u'user "%s" still owns %s repositories and cannot be '
 
                      'removed. Switch owners or remove those repositories. %s')
 
                    % (user.username, len(repos), ', '.join(repos))
 
                )
 
            self.sa.delete(user)
 

	
 
            from kallithea.lib.hooks import log_delete_user
 
            log_delete_user(user.get_dict(), cur_user)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def reset_password_link(self, data):
 
        from kallithea.lib.celerylib import tasks, run_task
 
        from kallithea.model.notification import EmailNotificationModel
 
        import kallithea.lib.helpers as h
 

	
 
        user_email = data['email']
 
        try:
 
            user = User.get_by_email(user_email)
 
            if user:
 
                log.debug('password reset user found %s' % user)
 
                link = url('reset_password_confirmation', key=user.api_key,
 
                           qualified=True)
 
                link = h.canonical_url('reset_password_confirmation', key=user.api_key)
 
                reg_type = EmailNotificationModel.TYPE_PASSWORD_RESET
 
                body = EmailNotificationModel().get_email_tmpl(reg_type,
 
                                                               user=user.short_contact,
 
                                                               reset_url=link)
 
                log.debug('sending email')
 
                run_task(tasks.send_email, [user_email],
 
                         _("Password reset link"), body, body)
 
                log.info('send new password mail to %s' % user_email)
 
            else:
 
                log.debug("password reset email %s not found" % user_email)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            return False
 

	
 
        return True
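With this change the link embedded in the password reset mail is derived from the canonical_url setting rather than from whatever host the request arrived on; triggering the mail is simply (placeholder address):

    # Illustrative only.
    from kallithea.model.user import UserModel

    UserModel().reset_password_link({'email': 'someone@example.com'})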
 

	
 
    def reset_password(self, data):
 
        from kallithea.lib.celerylib import tasks, run_task
 
        from kallithea.lib import auth
 
        user_email = data['email']
 
        pre_db = True
 
        try:
 
            user = User.get_by_email(user_email)
 
            new_passwd = auth.PasswordGenerator().gen_password(8,
 
                            auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
 
            if user:
 
                user.password = auth.get_crypt_password(new_passwd)
 
                Session().add(user)
 
                Session().commit()
 
                log.info('change password for %s' % user_email)
 
            if new_passwd is None:
 
                raise Exception('unable to generate new password')
 

	
 
            pre_db = False
 
            run_task(tasks.send_email, [user_email],
 
                     _('Your new password'),
 
                     _('Your new Kallithea password:%s') % (new_passwd,))
 
            log.info('send new password mail to %s' % user_email)
 

	
 
        except Exception:
 
            log.error('Failed to update user password')
 
            log.error(traceback.format_exc())
 
            if pre_db:
 
                # we rollback only if local db stuff fails. If it goes into
 
                # run_task, we're past the rollback state and this wouldn't work then
 
                Session().rollback()
 

	
 
        return True
 

	
 
    def fill_data(self, auth_user, user_id=None, api_key=None, username=None):
 
        """
 
        Fetches auth_user by user_id, or api_key if present.
 
        Fills auth_user attributes with those taken from the database.
 
        Additionally sets is_authenticated to False if the lookup fails or
 
        the user is not present in the database.
 

	
 
        :param auth_user: instance of user to set attributes
 
        :param user_id: user id to fetch by
 
        :param api_key: api key to fetch by
 
        :param username: username to fetch by
 
        """
 
        if user_id is None and api_key is None and username is None:
 
            raise Exception('You need to pass user_id, api_key or username')
 

	
 
        try:
 
            dbuser = None
 
            if user_id:
 
                dbuser = self.get(user_id)
 
            elif api_key:
 
                dbuser = self.get_by_api_key(api_key)
 
            elif username:
 
                dbuser = self.get_by_username(username)
 

	
 
            if dbuser is not None and dbuser.active:
 
                log.debug('filling %s data' % dbuser)
 
                for k, v in dbuser.get_dict().iteritems():
 
                    if k not in ['api_keys', 'permissions']:
 
                        setattr(auth_user, k, v)
 
            else:
 
                return False
 

	
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            auth_user.is_authenticated = False
 
            return False
 

	
 
        return True
 

	
 
    def has_perm(self, user, perm):
 
        perm = self._get_perm(perm)
 
        user = self._get_user(user)
 

	
 
        return UserToPerm.query().filter(UserToPerm.user == user)\
 
            .filter(UserToPerm.permission == perm).scalar() is not None
 

	
 
    def grant_perm(self, user, perm):
 
        """
 
        Grant user global permissions
 

	
 
        :param user:
 
        :param perm:
 
        """
 
        user = self._get_user(user)
 
        perm = self._get_perm(perm)
 
        # if this permission is already granted skip it
 
        _perm = UserToPerm.query()\
 
            .filter(UserToPerm.user == user)\
 
            .filter(UserToPerm.permission == perm)\
 
            .scalar()
 
        if _perm:
 
            return
 
        new = UserToPerm()
 
        new.user = user
 
        new.permission = perm
 
        self.sa.add(new)
 
        return new
 

	
 
    def revoke_perm(self, user, perm):
 
        """
 
        Revoke users global permissions
 

	
 
        :param user:
 
        :param perm:
 
        """
 
        user = self._get_user(user)
 
        perm = self._get_perm(perm)
 

	
 
        obj = UserToPerm.query()\
 
                .filter(UserToPerm.user == user)\
 
                .filter(UserToPerm.permission == perm)\
 
                .scalar()
 
        if obj:
 
            self.sa.delete(obj)
 

	
 
    def add_extra_email(self, user, email):
 
        """
 
        Adds email address to UserEmailMap
 

	
 
        :param user:
 
        :param email:
 
        """
 
        from kallithea.model import forms
 
        form = forms.UserExtraEmailForm()()
 
        data = form.to_python(dict(email=email))
 
        user = self._get_user(user)
 

	
 
        obj = UserEmailMap()
 
        obj.user = user
 
        obj.email = data['email']
 
        self.sa.add(obj)
 
        return obj
 

	
 
    def delete_extra_email(self, user, email_id):
 
        """
 
        Removes email address from UserEmailMap
 

	
 
        :param user:
 
        :param email_id:
 
        """
 
        user = self._get_user(user)
 
        obj = UserEmailMap.query().get(email_id)
 
        if obj:
 
            self.sa.delete(obj)
 

	
 
    def add_extra_ip(self, user, ip):
 
        """
 
        Adds an IP address to UserIpMap
 

	
 
        :param user: user to add the IP address for
 
        :param ip: IP address to add
 
        """
 
        from kallithea.model import forms
 
        form = forms.UserExtraIpForm()()
 
        data = form.to_python(dict(ip=ip))
 
        user = self._get_user(user)
 

	
 
        obj = UserIpMap()
 
        obj.user = user
 
        obj.ip_addr = data['ip']
 
        self.sa.add(obj)
 
        return obj
 

	
 
    def delete_extra_ip(self, user, ip_id):
 
        """
 
        Removes an IP address from UserIpMap
 

	
 
        :param user: user the IP address belongs to
 
        :param ip_id: id of the UserIpMap entry to delete
 
        """
 
        user = self._get_user(user)
 
        obj = UserIpMap.query().get(ip_id)
 
        if obj:
kallithea/templates/base/base.html
Show inline comments
 
## -*- coding: utf-8 -*-
 
<%inherit file="root.html"/>
 

	
 
<!-- HEADER -->
 
<div id="header">
 
    <div id="header-inner" class="title">
 
        <div id="logo">
 
          <a href="${h.url('home')}" style="display: block;">
 
            <div class="header">
 
                <img src="${h.url('/images/kallithea-logo.svg')}" onerror="this.src='${h.url('/images/kallithea-logo.png')}'" alt="Kallithea"/>
 
            </div>
 
            %if c.site_name:
 
             <div class="branding">${c.site_name}</div>
 
            %endif
 
          </a>
 
        </div>
 
        <!-- MENU -->
 
        ${self.page_nav()}
 
        <!-- END MENU -->
 
        ${self.body()}
 
    </div>
 
</div>
 
<!-- END HEADER -->
 

	
 
<!-- CONTENT -->
 
<div id="content">
 
    ${self.flash_msg()}
 
    <div id="main">
 
        ${next.main()}
 
    </div>
 
</div>
 
<!-- END CONTENT -->
 

	
 
<!-- FOOTER -->
 
<div id="footer">
 
   <div id="footer-inner" class="title">
 
       <div>
 
           <p class="footer-link">
 
               ${_('Server instance: %s') % c.instance_id if c.instance_id else ''}
 
           </p>
 
           <p class="footer-link-right">
 
               This site is powered by
 
               %if c.visual.show_version:
 
                   <a href="${h.url('kallithea_project_url')}" target="_blank">Kallithea</a> ${c.kallithea_version},
 
               %else:
 
                   <a href="${h.url('kallithea_project_url')}" target="_blank">Kallithea</a>,
 
               %endif
 
               which is
 
               <a href="${h.url('about')}#copyright">&copy; 2010&ndash;2014 by various authors &amp; licensed under GPLv3</a>.
 
               <a href="${h.canonical_url('about')}#copyright">&copy; 2010&ndash;2014 by various authors &amp; licensed under GPLv3</a>.
 
               %if c.issues_url:
 
                   &ndash; <a href="${c.issues_url}" target="_blank">${_('Support')}</a>
 
               %endif
 
           </p>
 
       </div>
 
   </div>
 
</div>
 

	
 
<!-- END FOOTER -->
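## Note (an assumption for illustration, not part of this template): the copyright
## link above now goes through h.canonical_url, which is taken to build the link
## from a configured base URL rather than from the request host; a hypothetical
## ini entry might look like:
##   canonical_url = https://kallithea.example.com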
 

	
 
### MAKO DEFS ###
 

	
 
<%def name="flash_msg()">
 
    <%include file="/base/flash_msg.html"/>
 
</%def>
 

	
 
<%def name="breadcrumbs()">
 
    <div class="breadcrumbs">
 
    ${self.breadcrumbs_links()}
 
    </div>
 
</%def>
 

	
 
<%def name="admin_menu()">
 
  <ul class="admin_menu">
 
      <li><a href="${h.url('admin_home')}"><i class="icon-book"></i> ${_('Admin journal')}</a></li>
 
      <li><a href="${h.url('repos')}"><i class="icon-archive"></i> ${_('Repositories')}</a></li>
 
      <li><a href="${h.url('repos_groups')}"><i class="icon-folder-close"></i> ${_('Repository groups')}</a></li>
 
      <li><a href="${h.url('users')}"><i class="icon-user"></i> ${_('Users')}</a></li>
 
      <li><a href="${h.url('users_groups')}"><i class="icon-group"></i> ${_('User groups')}</a></li>
 
      <li><a href="${h.url('admin_permissions')}"><i class="icon-ban-circle"></i> ${_('Permissions')}</a></li>
 
      <li><a href="${h.url('auth_home')}"><i class="icon-key"></i> ${_('Authentication')}</a></li>
 
      <li><a href="${h.url('defaults')}"><i class="icon-wrench"></i> ${_('Defaults')}</a></li>
 
      <li class="last"><a href="${h.url('admin_settings')}"><i class="icon-cog"></i> ${_('Settings')}</a></li>
 
  </ul>
 

	
 
</%def>
 

	
 

	
 
## admin menu used for people that have some admin resources
 
<%def name="admin_menu_simple(repositories=None, repository_groups=None, user_groups=None)">
 
  <ul>
 
   %if repositories:
 
      <li><a href="${h.url('repos')}"><i class="icon-archive"></i> ${_('Repositories')}</a></li>
 
   %endif
 
   %if repository_groups:
 
      <li><a href="${h.url('repos_groups')}"><i class="icon-folder-close"></i> ${_('Repository groups')}</a></li>
 
   %endif
 
   %if user_groups:
 
      <li><a href="${h.url('users_groups')}"><i class="icon-group"></i> ${_('User groups')}</a></li>
 
   %endif
 
  </ul>
 
</%def>
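## Usage sketch (hypothetical caller, not part of this changeset): a page shown to
## a non-admin user who owns some resources might render the reduced menu as
##   ${self.admin_menu_simple(c.user_repositories, c.user_repo_groups, c.user_user_groups)}
## where the three c.* names are placeholders for illustration.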
 

	
 
<%def name="repo_context_bar(current=None, rev=None)">
 
  <% rev = None if rev == 'tip' else rev %>
 
  <%
 
      def follow_class():
 
          if c.repository_following:
 
              return h.literal('following')
 
          else:
 
              return h.literal('follow')
 
  %>
 
  <%
 
    def is_current(selected):
 
        if selected == current:
 
            return h.literal('class="current"')
 
    %>
 

	
 
  <!--- CONTEXT BAR -->
 
  <div id="context-bar" class="box">
 
      <h2>
 
        %if h.is_hg(c.db_repo):
 
          <i class="icon-hg" style="color: #576622; font-size: 24px"></i>
 
        %endif
 
        %if h.is_git(c.db_repo):
 
          <i class="icon-git" style="color: #e85634; font-size: 24px"></i>
 
        %endif
 

	
 
        ## public/private
 
        %if c.db_repo.private:
 
          <i class="icon-lock"></i>
 
        %else:
 
          <i class="icon-unlock-alt"></i>
 
        %endif
 
        ${h.repo_link(c.db_repo.groups_and_repo)}
 

	
 
        %if current == 'createfork':
 
         - ${_('Create fork')}
 
        %endif
 
      </h2>
 
      <!--
 
      <div id="breadcrumbs">
 
        ${h.link_to(_(u'Repositories'),h.url('home'))}
 
        &raquo;
 
        ${h.repo_link(c.db_repo.groups_and_repo)}
 
      </div>
 
      -->
 
      <ul id="context-pages" class="horizontal-list">
 
        <li ${is_current('summary')}><a href="${h.url('summary_home', repo_name=c.repo_name)}"><i class="icon-file-text"></i> ${_('Summary')}</a></li>
 
        %if rev:
 
        <li ${is_current('changelog')}><a href="${h.url('changelog_file_home', repo_name=c.repo_name, revision=rev, f_path='')}"><i class="icon-time"></i> ${_('Changelog')}</a></li>
 
        %else:
 
        <li ${is_current('changelog')}><a href="${h.url('changelog_home', repo_name=c.repo_name)}"><i class="icon-time"></i> ${_('Changelog')}</a></li>
 
        %endif
 
        <li ${is_current('files')}><a href="${h.url('files_home', repo_name=c.repo_name, revision=rev or 'tip')}"><i class="icon-file"></i> ${_('Files')}</a></li>
 
        <li ${is_current('switch-to')}>
 
          <a href="#" id="branch_tag_switcher_2" class="dropdown"><i class="icon-random"></i> ${_('Switch To')}</a>
 
          <ul id="switch_to_list_2" class="switch_to submenu">
 
            <li><a href="#">${_('Loading...')}</a></li>
 
          </ul>
 
        </li>
 
        <li ${is_current('options')}>
 
             %if h.HasRepoPermissionAll('repository.admin')(c.repo_name):
 
               <a href="${h.url('edit_repo',repo_name=c.repo_name)}" class="dropdown"><i class="icon-cogs"></i> ${_('Options')}</a>
 
             %else:
 
               <a href="#" class="dropdown"><i class="icon-cogs"></i> ${_('Options')}</a>
 
             %endif
 
          <ul>
 
             %if h.HasRepoPermissionAll('repository.admin')(c.repo_name):
 
                   <li><a href="${h.url('edit_repo',repo_name=c.repo_name)}"><i class="icon-cog"></i> ${_('Settings')}</a></li>
 
             %endif
 
              %if c.db_repo.fork:
 
               <li><a href="${h.url('compare_url',repo_name=c.db_repo.fork.repo_name,org_ref_type=c.db_repo.landing_rev[0],org_ref_name=c.db_repo.landing_rev[1], other_repo=c.repo_name,other_ref_type='branch' if request.GET.get('branch') else c.db_repo.landing_rev[0],other_ref_name=request.GET.get('branch') or c.db_repo.landing_rev[1], merge=1)}">
 
                   <i class="icon-loop"></i> ${_('Compare fork')}</a></li>
 
              %endif
 
              <li><a href="${h.url('compare_home',repo_name=c.repo_name)}"><i class="icon-loop"></i> ${_('Compare')}</a></li>
 

	
 
              <li><a href="${h.url('search_repo',repo_name=c.repo_name)}"><i class="icon-search"></i> ${_('Search')}</a></li>
 

	
 
              %if h.HasRepoPermissionAny('repository.write','repository.admin')(c.repo_name) and c.db_repo.enable_locking:
 
                %if c.db_repo.locked[0]:
 
                  <li>${h.link_to(_('Unlock'), h.url('toggle_locking',repo_name=c.repo_name),class_='locking_del')}</li>
 
                %else:
 
                  <li>${h.link_to(_('Lock'), h.url('toggle_locking',repo_name=c.repo_name),class_='locking_add')}</li>
 
                %endif
 
              %endif
 
              ## TODO: this check feels wrong; it would be better to have a check for permissions,
 
              ## and it also feels like a job for the controller
 
              %if c.authuser.username != 'default':
 
                  <li>
 
                   <a class="${follow_class()}" onclick="javascript:toggleFollowingRepo(this,${c.db_repo.repo_id},'${str(h.get_token())}');">
 
                    <span class="show-follow"><i class="icon-heart-empty"></i> ${_('Follow')}</span>
 
                    <span class="show-following"><i class="icon-heart"></i> ${_('Unfollow')}</span>
 
                  </a>
 
                  </li>
 
                  <li><a href="${h.url('repo_fork_home',repo_name=c.repo_name)}"><i class="icon-code-fork"></i> ${_('Fork')}</a></li>
 
                  %if h.is_hg(c.db_repo_scm_instance):
 
                  <li><a href="${h.url('pullrequest_home',repo_name=c.repo_name)}"><i class="icon-code-fork"></i> ${_('Create Pull Request')}</a></li>
 
                  %endif
 
              %endif
 
             </ul>
 
        </li>
 
        <li ${is_current('showpullrequest')}>
 
          <a href="${h.url('pullrequest_show_all',repo_name=c.repo_name)}" title="${_('Show Pull Requests for %s') % c.repo_name}"> <i class="icon-code-fork"></i> ${_('Pull Requests')}
 
            %if c.repository_pull_requests:
 
              <span>${c.repository_pull_requests}</span>
 
            %endif
 
          </a>
 
        </li>
 
      </ul>
 
  </div>
 
  <script type="text/javascript">
 
      YUE.on('branch_tag_switcher_2','mouseover',function(){
 
         var loaded = YUD.hasClass('branch_tag_switcher_2','loaded');
 
         if(!loaded){
 
             YUD.addClass('branch_tag_switcher_2','loaded');
 
             ypjax("${h.url('branch_tag_switcher',repo_name=c.repo_name)}",'switch_to_list_2',
 
                 function(o){},
 
                 function(o){YUD.removeClass('branch_tag_switcher_2','loaded');}
 
                 ,null);
 
         }
 
         return false;
 
      });
 
  </script>
 
  <!--- END CONTEXT BAR -->
 
</%def>
 

	
 
<%def name="menu(current=None)">
 
  <%
 
  def is_current(selected):
 
      if selected == current:
 
          return h.literal('class="current"')
 
  %>
 

	
 
  <ul id="quick" class="horizontal-list">
 
    <!-- repo switcher -->
 
    <li ${is_current('repositories')}>
 
      <input id="repo_switcher" name="repo_switcher" type="hidden">
 
    </li>
 

	
 
    ##ROOT MENU
 
    %if c.authuser.username != 'default':