Changeset - f9bc28c44f30
[Not reviewed]
branch: default
Mads Kiilerich - 2014-08-12 13:08:23
madski@unity3d.com
urls: introduce canonical_url config setting

All URLs that are shown, persisted, or emailed will use this setting instead of
the current request URL.

This is convenient when the server has multiple names - for instance when
transitioning from one protocol, domain, or hostname to another.
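
The new setting is read from Kallithea's .ini configuration, and the controllers below switch from pylons' url(..., qualified=True) to a new h.canonical_url() helper (presumably added in kallithea/lib/helpers.py, one of the five changed files, which falls outside this excerpt). A minimal sketch of such a helper, assuming the value has the form scheme://host and that routes/pylons url() accepts the host and protocol keyword arguments:

# Hypothetical .ini entry (the value is illustrative):
#   canonical_url = https://kallithea.example.com

from pylons import url


def canonical_url(*args, **kargs):
    """Sketch: like url(x, qualified=True), but take the protocol and host
    from the canonical_url setting instead of the current request."""
    from kallithea import CONFIG
    try:
        parts = CONFIG.get('canonical_url', '').split('://', 1)
        kargs['host'] = parts[1].split('/', 1)[0]  # host part of the setting
        kargs['protocol'] = parts[0]               # scheme part of the setting
    except IndexError:
        # setting missing or malformed - fall back to the current request URL
        kargs['qualified'] = True
    return url(*args, **kargs)

Callers can then use h.canonical_url('summary_home', repo_name=repo_name) wherever they previously used url('summary_home', repo_name=repo_name, qualified=True), as the diff hunks below show.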
5 files changed:
0 comments (0 inline, 0 general)
kallithea/controllers/feed.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.feed
 
~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Feed controller for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 23, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 

	
 
import logging
 

	
 
-from pylons import url, response, tmpl_context as c
+from pylons import response, tmpl_context as c
 
from pylons.i18n.translation import _
 

	
 
from beaker.cache import cache_region, region_invalidate
 
from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed
 

	
 
from kallithea.lib import helpers as h
 
from kallithea.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
 
from kallithea.lib.base import BaseRepoController
 
from kallithea.lib.diffs import DiffProcessor, LimitedDiffContainer
 
from kallithea.model.db import CacheInvalidation
 
from kallithea.lib.utils2 import safe_int, str2bool, safe_unicode
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class FeedController(BaseRepoController):
 

	
 
    @LoginRequired(api_access=True)
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def __before__(self):
 
        super(FeedController, self).__before__()
 
        #common values for feeds
 
        self.description = _('Changes on %s repository')
 
        self.title = _('%s %s feed') % (c.site_name, '%s')
 
        self.language = 'en-us'
 
        self.ttl = "5"
 
        import kallithea
 
        CONF = kallithea.CONFIG
 
        self.include_diff = str2bool(CONF.get('rss_include_diff', False))
 
        self.feed_nr = safe_int(CONF.get('rss_items_per_page', 20))
 
        # we need to protect from parsing huge diffs here other way
 
        # we can kill the server
 
        self.feed_diff_limit = safe_int(CONF.get('rss_cut_off_limit', 32 * 1024))
 

	
 
    def _get_title(self, cs):
 
        return "%s" % (
 
            h.shorter(cs.message, 160)
 
        )
 

	
 
    def __changes(self, cs):
 
        changes = []
 
        diff_processor = DiffProcessor(cs.diff(),
 
                                       diff_limit=self.feed_diff_limit)
 
        _parsed = diff_processor.prepare(inline_diff=False)
 
        limited_diff = False
 
        if isinstance(_parsed, LimitedDiffContainer):
 
            limited_diff = True
 

	
 
        for st in _parsed:
 
            st.update({'added': st['stats']['added'],
 
                       'removed': st['stats']['deleted']})
 
            changes.append('\n %(operation)s %(filename)s '
 
                           '(%(added)s lines added, %(removed)s lines removed)'
 
                            % st)
 
        if limited_diff:
 
            changes = changes + ['\n ' +
 
                                 _('Changeset was too big and was cut off...')]
 
        return diff_processor, changes
 

	
 
    def __get_desc(self, cs):
 
        desc_msg = [(_('%s committed on %s')
 
                     % (h.person(cs.author), h.fmt_date(cs.date))) + '<br/>']
 
        #branches, tags, bookmarks
 
        if cs.branch:
 
            desc_msg.append('branch: %s<br/>' % cs.branch)
 
        if h.is_hg(c.db_repo_scm_instance):
 
            for book in cs.bookmarks:
 
                desc_msg.append('bookmark: %s<br/>' % book)
 
        for tag in cs.tags:
 
            desc_msg.append('tag: %s<br/>' % tag)
 
        diff_processor, changes = self.__changes(cs)
 
        # rev link
 
-        _url = url('changeset_home', repo_name=c.db_repo.repo_name,
-                   revision=cs.raw_id, qualified=True)
+        _url = h.canonical_url('changeset_home', repo_name=c.db_repo.repo_name,
+                   revision=cs.raw_id)
 
        desc_msg.append('changeset: <a href="%s">%s</a>' % (_url, cs.raw_id[:8]))
 

	
 
        desc_msg.append('<pre>')
 
        desc_msg.append(cs.message)
 
        desc_msg.append('\n')
 
        desc_msg.extend(changes)
 
        if self.include_diff:
 
            desc_msg.append('\n\n')
 
            desc_msg.append(diff_processor.as_raw())
 
        desc_msg.append('</pre>')
 
        return map(safe_unicode, desc_msg)
 

	
 
    def atom(self, repo_name):
 
        """Produce an atom-1.0 feed via feedgenerator module"""
 

	
 
        @cache_region('long_term')
 
        def _get_feed_from_cache(key, kind):
 
            feed = Atom1Feed(
 
                 title=self.title % repo_name,
 
-                 link=url('summary_home', repo_name=repo_name,
-                          qualified=True),
+                 link=h.canonical_url('summary_home', repo_name=repo_name),
 
                 description=self.description % repo_name,
 
                 language=self.language,
 
                 ttl=self.ttl
 
            )
 

	
 
            for cs in reversed(list(c.db_repo_scm_instance[-self.feed_nr:])):
 
                feed.add_item(title=self._get_title(cs),
 
-                              link=url('changeset_home', repo_name=repo_name,
-                                       revision=cs.raw_id, qualified=True),
+                              link=h.canonical_url('changeset_home', repo_name=repo_name,
+                                       revision=cs.raw_id),
 
                              author_name=cs.author,
 
                              description=''.join(self.__get_desc(cs)),
 
                              pubdate=cs.date,
 
                              )
 

	
 
            response.content_type = feed.mime_type
 
            return feed.writeString('utf-8')
 

	
 
        kind = 'ATOM'
 
        valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
 
        if not valid:
 
            region_invalidate(_get_feed_from_cache, None, repo_name, kind)
 
        return _get_feed_from_cache(repo_name, kind)
 

	
 
    def rss(self, repo_name):
 
        """Produce an rss2 feed via feedgenerator module"""
 

	
 
        @cache_region('long_term')
 
        def _get_feed_from_cache(key, kind):
 
            feed = Rss201rev2Feed(
 
                title=self.title % repo_name,
 
-                link=url('summary_home', repo_name=repo_name,
-                         qualified=True),
+                link=h.canonical_url('summary_home', repo_name=repo_name),
 
                description=self.description % repo_name,
 
                language=self.language,
 
                ttl=self.ttl
 
            )
 

	
 
            for cs in reversed(list(c.db_repo_scm_instance[-self.feed_nr:])):
 
                feed.add_item(title=self._get_title(cs),
 
-                              link=url('changeset_home', repo_name=repo_name,
-                                       revision=cs.raw_id, qualified=True),
+                              link=h.canonical_url('changeset_home', repo_name=repo_name,
+                                       revision=cs.raw_id),
 
                              author_name=cs.author,
 
                              description=''.join(self.__get_desc(cs)),
 
                              pubdate=cs.date,
 
                             )
 

	
 
            response.content_type = feed.mime_type
 
            return feed.writeString('utf-8')
 

	
 
        kind = 'RSS'
 
        valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
 
        if not valid:
 
            region_invalidate(_get_feed_from_cache, None, repo_name, kind)
 
        return _get_feed_from_cache(repo_name, kind)
kallithea/controllers/journal.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.journal
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Journal controller for pylons
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Nov 21, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 

	
 
"""
 

	
 
import logging
 
from itertools import groupby
 

	
 
from sqlalchemy import or_
 
from sqlalchemy.orm import joinedload
 
from sqlalchemy.sql.expression import func
 

	
 
from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed
 

	
 
from webob.exc import HTTPBadRequest
 
from pylons import request, tmpl_context as c, response, url
 
from pylons.i18n.translation import _
 

	
 
from kallithea.controllers.admin.admin import _journal_filter
 
from kallithea.model.db import UserLog, UserFollowing, Repository, User
 
from kallithea.model.meta import Session
 
from kallithea.model.repo import RepoModel
 
import kallithea.lib.helpers as h
 
from kallithea.lib.helpers import Page
 
from kallithea.lib.auth import LoginRequired, NotAnonymous
 
from kallithea.lib.base import BaseController, render
 
from kallithea.lib.utils2 import safe_int, AttributeDict
 
from kallithea.lib.compat import json
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class JournalController(BaseController):
 

	
 
    def __before__(self):
 
        super(JournalController, self).__before__()
 
        self.language = 'en-us'
 
        self.ttl = "5"
 
        self.feed_nr = 20
 
        c.search_term = request.GET.get('filter')
 

	
 
    def _get_daily_aggregate(self, journal):
 
        groups = []
 
        for k, g in groupby(journal, lambda x: x.action_as_day):
 
            user_group = []
 
            #groupby username if it's a present value, else fallback to journal username
 
            for _unused, g2 in groupby(list(g), lambda x: x.user.username if x.user else x.username):
 
                l = list(g2)
 
                user_group.append((l[0].user, l))
 

	
 
            groups.append((k, user_group,))
 

	
 
        return groups
 

	
 
    def _get_journal_data(self, following_repos):
 
        repo_ids = [x.follows_repository.repo_id for x in following_repos
 
                    if x.follows_repository is not None]
 
        user_ids = [x.follows_user.user_id for x in following_repos
 
                    if x.follows_user is not None]
 

	
 
        filtering_criterion = None
 

	
 
        if repo_ids and user_ids:
 
            filtering_criterion = or_(UserLog.repository_id.in_(repo_ids),
 
                        UserLog.user_id.in_(user_ids))
 
        if repo_ids and not user_ids:
 
            filtering_criterion = UserLog.repository_id.in_(repo_ids)
 
        if not repo_ids and user_ids:
 
            filtering_criterion = UserLog.user_id.in_(user_ids)
 
        if filtering_criterion is not None:
 
            journal = self.sa.query(UserLog)\
 
                .options(joinedload(UserLog.user))\
 
                .options(joinedload(UserLog.repository))
 
            #filter
 
            try:
 
                journal = _journal_filter(journal, c.search_term)
 
            except Exception:
 
                # we want this to crash for now
 
                raise
 
            journal = journal.filter(filtering_criterion)\
 
                        .order_by(UserLog.action_date.desc())
 
        else:
 
            journal = []
 

	
 
        return journal
 

	
 
    def _atom_feed(self, repos, public=True):
 
        journal = self._get_journal_data(repos)
 
        if public:
 
-            _link = url('public_journal_atom', qualified=True)
+            _link = h.canonical_url('public_journal_atom')
 
            _desc = '%s %s %s' % (c.site_name, _('public journal'),
 
                                  'atom feed')
 
        else:
 
-            _link = url('journal_atom', qualified=True)
+            _link = h.canonical_url('journal_atom')
 
            _desc = '%s %s %s' % (c.site_name, _('journal'), 'atom feed')
 

	
 
        feed = Atom1Feed(title=_desc,
 
                         link=_link,
 
                         description=_desc,
 
                         language=self.language,
 
                         ttl=self.ttl)
 

	
 
        for entry in journal[:self.feed_nr]:
 
            user = entry.user
 
            if user is None:
 
                #fix deleted users
 
                user = AttributeDict({'short_contact': entry.username,
 
                                      'email': '',
 
                                      'full_contact': ''})
 
            action, action_extra, ico = h.action_parser(entry, feed=True)
 
            title = "%s - %s %s" % (user.short_contact, action(),
 
                                    entry.repository.repo_name)
 
            desc = action_extra()
 
            _url = None
 
            if entry.repository is not None:
 
-                _url = url('changelog_home',
-                           repo_name=entry.repository.repo_name,
-                           qualified=True)
+                _url = h.canonical_url('changelog_home',
+                           repo_name=entry.repository.repo_name)
 

	
 
            feed.add_item(title=title,
 
                          pubdate=entry.action_date,
 
-                          link=_url or url('', qualified=True),
+                          link=_url or h.canonical_url(''),
 
                          author_email=user.email,
 
                          author_name=user.full_contact,
 
                          description=desc)
 

	
 
        response.content_type = feed.mime_type
 
        return feed.writeString('utf-8')
 

	
 
    def _rss_feed(self, repos, public=True):
 
        journal = self._get_journal_data(repos)
 
        if public:
 
-            _link = url('public_journal_atom', qualified=True)
+            _link = h.canonical_url('public_journal_atom')
 
            _desc = '%s %s %s' % (c.site_name, _('public journal'),
 
                                  'rss feed')
 
        else:
 
-            _link = url('journal_atom', qualified=True)
+            _link = h.canonical_url('journal_atom')
 
            _desc = '%s %s %s' % (c.site_name, _('journal'), 'rss feed')
 

	
 
        feed = Rss201rev2Feed(title=_desc,
 
                         link=_link,
 
                         description=_desc,
 
                         language=self.language,
 
                         ttl=self.ttl)
 

	
 
        for entry in journal[:self.feed_nr]:
 
            user = entry.user
 
            if user is None:
 
                #fix deleted users
 
                user = AttributeDict({'short_contact': entry.username,
 
                                      'email': '',
 
                                      'full_contact': ''})
 
            action, action_extra, ico = h.action_parser(entry, feed=True)
 
            title = "%s - %s %s" % (user.short_contact, action(),
 
                                    entry.repository.repo_name)
 
            desc = action_extra()
 
            _url = None
 
            if entry.repository is not None:
 
-                _url = url('changelog_home',
-                           repo_name=entry.repository.repo_name,
-                           qualified=True)
+                _url = h.canonical_url('changelog_home',
+                           repo_name=entry.repository.repo_name)
 

	
 
            feed.add_item(title=title,
 
                          pubdate=entry.action_date,
 
-                          link=_url or url('', qualified=True),
+                          link=_url or h.canonical_url(''),
 
                          author_email=user.email,
 
                          author_name=user.full_contact,
 
                          description=desc)
 

	
 
        response.content_type = feed.mime_type
 
        return feed.writeString('utf-8')
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    def index(self):
 
        # Return a rendered template
 
        p = safe_int(request.GET.get('page', 1), 1)
 
        c.user = User.get(self.authuser.user_id)
 
        c.following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 

	
 
        journal = self._get_journal_data(c.following)
 

	
 
        def url_generator(**kw):
 
            return url.current(filter=c.search_term, **kw)
 

	
 
        c.journal_pager = Page(journal, page=p, items_per_page=20, url=url_generator)
 
        c.journal_day_aggreagate = self._get_daily_aggregate(c.journal_pager)
 

	
 
        c.journal_data = render('journal/journal_data.html')
 
        if request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            return c.journal_data
 

	
 
        repos_list = Session().query(Repository)\
 
                     .filter(Repository.user_id ==
 
                             self.authuser.user_id)\
 
                     .order_by(func.lower(Repository.repo_name)).all()
 

	
 
        repos_data = RepoModel().get_repos_as_dict(repos_list=repos_list,
 
                                                   admin=True)
 
        #json used to render the grid
 
        c.data = json.dumps(repos_data)
 

	
 
        watched_repos_data = []
 

	
 
        ## watched repos
 
        _render = RepoModel._render_datatable
 

	
 
        def quick_menu(repo_name):
 
            return _render('quick_menu', repo_name)
 

	
 
        def repo_lnk(name, rtype, rstate, private, fork_of):
 
            return _render('repo_name', name, rtype, rstate, private, fork_of,
 
                           short_name=False, admin=False)
 

	
 
        def last_rev(repo_name, cs_cache):
 
            return _render('revision', repo_name, cs_cache.get('revision'),
 
                           cs_cache.get('raw_id'), cs_cache.get('author'),
 
                           cs_cache.get('message'))
 

	
 
        def desc(desc):
 
            from pylons import tmpl_context as c
 
            if c.visual.stylify_metatags:
 
                return h.urlify_text(h.desc_stylize(h.truncate(desc, 60)))
 
            else:
 
                return h.urlify_text(h.truncate(desc, 60))
 

	
 
        def repo_actions(repo_name):
 
            return _render('repo_actions', repo_name)
 

	
 
        def owner_actions(user_id, username):
 
            return _render('user_name', user_id, username)
 

	
 
        def toogle_follow(repo_id):
 
            return  _render('toggle_follow', repo_id)
 

	
 
        for entry in c.following:
 
            repo = entry.follows_repository
 
            cs_cache = repo.changeset_cache
 
            row = {
 
                "menu": quick_menu(repo.repo_name),
 
                "raw_name": repo.repo_name.lower(),
 
                "name": repo_lnk(repo.repo_name, repo.repo_type,
 
                                 repo.repo_state, repo.private, repo.fork),
 
                "last_changeset": last_rev(repo.repo_name, cs_cache),
 
                "last_rev_raw": cs_cache.get('revision'),
 
                "action": toogle_follow(repo.repo_id)
 
            }
 

	
 
            watched_repos_data.append(row)
 

	
 
        c.watched_data = json.dumps({
 
            "totalRecords": len(c.following),
 
            "startIndex": 0,
 
            "sort": "name",
 
            "dir": "asc",
 
            "records": watched_repos_data
 
        })
 
        return render('journal/journal.html')
 

	
 
    @LoginRequired(api_access=True)
 
    @NotAnonymous()
 
    def journal_atom(self):
 
        """
 
        Produce an atom-1.0 feed via feedgenerator module
 
        """
 
        following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 
        return self._atom_feed(following, public=False)
 

	
 
    @LoginRequired(api_access=True)
 
    @NotAnonymous()
 
    def journal_rss(self):
 
        """
 
        Produce an rss feed via feedgenerator module
 
        """
 
        following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 
        return self._rss_feed(following, public=False)
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    def toggle_following(self):
 
        cur_token = request.POST.get('auth_token')
 
        token = h.get_token()
 
        if cur_token == token:
 

	
 
            user_id = request.POST.get('follows_user_id')
 
            if user_id:
 
                try:
 
                    self.scm_model.toggle_following_user(user_id,
 
                                                self.authuser.user_id)
 
                    Session.commit()
 
                    return 'ok'
 
                except Exception:
 
                    raise HTTPBadRequest()
 

	
 
            repo_id = request.POST.get('follows_repo_id')
 
            if repo_id:
 
                try:
 
                    self.scm_model.toggle_following_repo(repo_id,
 
                                                self.authuser.user_id)
 
                    Session.commit()
 
                    return 'ok'
 
                except Exception:
 
                    raise HTTPBadRequest()
 

	
 
        log.debug('token mismatch %s vs %s' % (cur_token, token))
 
        raise HTTPBadRequest()
 

	
 
    @LoginRequired()
 
    def public_journal(self):
 
        # Return a rendered template
 
        p = safe_int(request.GET.get('page', 1), 1)
 

	
 
        c.following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 

	
 
        journal = self._get_journal_data(c.following)
 

	
 
        c.journal_pager = Page(journal, page=p, items_per_page=20)
 

	
 
        c.journal_day_aggreagate = self._get_daily_aggregate(c.journal_pager)
 

	
 
        c.journal_data = render('journal/journal_data.html')
 
        if request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            return c.journal_data
 
        return render('journal/public_journal.html')
 

	
 
    @LoginRequired(api_access=True)
 
    def public_journal_atom(self):
 
        """
 
        Produce an atom-1.0 feed via feedgenerator module
 
        """
 
        c.following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 

	
 
        return self._atom_feed(c.following)
 

	
 
    @LoginRequired(api_access=True)
 
    def public_journal_rss(self):
 
        """
 
        Produce an rss2 feed via feedgenerator module
 
        """
 
        c.following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 

	
 
        return self._rss_feed(c.following)
kallithea/controllers/pullrequests.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.pullrequests
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
pull requests controller for Kallithea for initializing pull requests
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: May 7, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 
import traceback
 
import formencode
 
import re
 

	
 
from webob.exc import HTTPNotFound, HTTPForbidden, HTTPBadRequest
 

	
 
from pylons import request, tmpl_context as c, url
 
from pylons.controllers.util import redirect
 
from pylons.i18n.translation import _
 

	
 
from kallithea.lib.compat import json
 
from kallithea.lib.base import BaseRepoController, render
 
from kallithea.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator,\
 
    NotAnonymous
 
from kallithea.lib.helpers import Page
 
from kallithea.lib import helpers as h
 
from kallithea.lib import diffs
 
from kallithea.lib.utils import action_logger, jsonify
 
from kallithea.lib.vcs.utils import safe_str
 
from kallithea.lib.vcs.exceptions import EmptyRepositoryError
 
from kallithea.lib.diffs import LimitedDiffContainer
 
from kallithea.model.db import  PullRequest, ChangesetStatus, ChangesetComment,\
 
    PullRequestReviewers
 
from kallithea.model.pull_request import PullRequestModel
 
from kallithea.model.meta import Session
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.comment import ChangesetCommentsModel
 
from kallithea.model.changeset_status import ChangesetStatusModel
 
from kallithea.model.forms import PullRequestForm, PullRequestPostForm
 
from kallithea.lib.utils2 import safe_int
 
from kallithea.controllers.changeset import _ignorews_url,\
 
    _context_url, get_line_ctx, get_ignore_ws
 
from kallithea.controllers.compare import CompareController
 
from kallithea.lib.graphmod import graph_data
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class PullrequestsController(BaseRepoController):
 

	
 
    def __before__(self):
 
        super(PullrequestsController, self).__before__()
 
        repo_model = RepoModel()
 
        c.users_array = repo_model.get_users_js()
 
        c.user_groups_array = repo_model.get_user_groups_js()
 

	
 
    def _get_repo_refs(self, repo, rev=None, branch=None, branch_rev=None):
 
        """return a structure with repo's interesting changesets, suitable for
 
        the selectors in pullrequest.html
 

	
 
        rev: a revision that must be in the list somehow and selected by default
 
        branch: a branch that must be in the list and selected by default - even if closed
 
        branch_rev: a revision of which peers should be preferred and available."""
 
        # list named branches that has been merged to this named branch - it should probably merge back
 
        peers = []
 

	
 
        if rev:
 
            rev = safe_str(rev)
 

	
 
        if branch:
 
            branch = safe_str(branch)
 

	
 
        if branch_rev:
 
            branch_rev = safe_str(branch_rev)
 
            # a revset not restricting to merge() would be better
 
            # (especially because it would get the branch point)
 
            # ... but is currently too expensive
 
            # including branches of children could be nice too
 
            peerbranches = set()
 
            for i in repo._repo.revs(
 
                "sort(parents(branch(id(%s)) and merge()) - branch(id(%s)), -rev)",
 
                branch_rev, branch_rev):
 
                abranch = repo.get_changeset(i).branch
 
                if abranch not in peerbranches:
 
                    n = 'branch:%s:%s' % (abranch, repo.get_changeset(abranch).raw_id)
 
                    peers.append((n, abranch))
 
                    peerbranches.add(abranch)
 

	
 
        selected = None
 
        tiprev = repo.tags.get('tip')
 
        tipbranch = None
 

	
 
        branches = []
 
        for abranch, branchrev in repo.branches.iteritems():
 
            n = 'branch:%s:%s' % (abranch, branchrev)
 
            desc = abranch
 
            if branchrev == tiprev:
 
                tipbranch = abranch
 
                desc = '%s (current tip)' % desc
 
            branches.append((n, desc))
 
            if rev == branchrev:
 
                selected = n
 
            if branch == abranch:
 
                if not rev:
 
                    selected = n
 
                branch = None
 
        if branch:  # branch not in list - it is probably closed
 
            branchrev = repo.closed_branches.get(branch)
 
            if branchrev:
 
                n = 'branch:%s:%s' % (branch, branchrev)
 
                branches.append((n, _('%s (closed)') % branch))
 
                selected = n
 
                branch = None
 
            if branch:
 
                log.error('branch %r not found in %s', branch, repo)
 

	
 
        bookmarks = []
 
        for bookmark, bookmarkrev in repo.bookmarks.iteritems():
 
            n = 'book:%s:%s' % (bookmark, bookmarkrev)
 
            bookmarks.append((n, bookmark))
 
            if rev == bookmarkrev:
 
                selected = n
 

	
 
        tags = []
 
        for tag, tagrev in repo.tags.iteritems():
 
            if tag == 'tip':
 
                continue
 
            n = 'tag:%s:%s' % (tag, tagrev)
 
            tags.append((n, tag))
 
            if rev == tagrev:
 
                selected = n
 

	
 
        # prio 1: rev was selected as existing entry above
 

	
 
        # prio 2: create special entry for rev; rev _must_ be used
 
        specials = []
 
        if rev and selected is None:
 
            selected = 'rev:%s:%s' % (rev, rev)
 
            specials = [(selected, '%s: %s' % (_("Changeset"), rev[:12]))]
 

	
 
        # prio 3: most recent peer branch
 
        if peers and not selected:
 
            selected = peers[0][0][0]
 

	
 
        # prio 4: tip revision
 
        if not selected:
 
            if h.is_hg(repo):
 
                if tipbranch:
 
                    selected = 'branch:%s:%s' % (tipbranch, tiprev)
 
                else:
 
                    selected = 'tag:null:0'
 
                    tags.append((selected, 'null'))
 
            else:
 
                if 'master' in repo.branches:
 
                    selected = 'branch:master:%s' % repo.branches['master']
 
                else:
 
                    k, v = repo.branches.items()[0]
 
                    selected = 'branch:%s:%s' % (k, v)
 

	
 
        groups = [(specials, _("Special")),
 
                  (peers, _("Peer branches")),
 
                  (bookmarks, _("Bookmarks")),
 
                  (branches, _("Branches")),
 
                  (tags, _("Tags")),
 
                  ]
 
        return [g for g in groups if g[0]], selected
 

	
 
    def _get_is_allowed_change_status(self, pull_request):
 
        if pull_request.is_closed():
 
            return False
 

	
 
        owner = self.authuser.user_id == pull_request.user_id
 
        reviewer = self.authuser.user_id in [x.user_id for x in
 
                                                   pull_request.reviewers]
 
        return self.authuser.admin or owner or reviewer
 

	
 
    def _load_compare_data(self, pull_request, enable_comments=True):
 
        """
 
        Load context data needed for generating compare diff
 

	
 
        :param pull_request:
 
        """
 
        c.org_repo = pull_request.org_repo
 
        (c.org_ref_type,
 
         c.org_ref_name,
 
         c.org_rev) = pull_request.org_ref.split(':')
 

	
 
        c.other_repo = pull_request.other_repo
 
        (c.other_ref_type,
 
         c.other_ref_name,
 
         c.other_rev) = pull_request.other_ref.split(':') # other_rev is ancestor
 

	
 
        org_scm_instance = c.org_repo.scm_instance # property with expensive cache invalidation check!!!
 
        c.cs_repo = c.org_repo
 
        c.cs_ranges = [org_scm_instance.get_changeset(x) for x in pull_request.revisions]
 
        c.cs_ranges_org = None # not stored and not important and moving target - could be calculated ...
 
        revs = [ctx.revision for ctx in reversed(c.cs_ranges)]
 
        c.jsdata = json.dumps(graph_data(org_scm_instance, revs))
 

	
 
        c.available = []
 
        c.org_branch_name = c.org_ref_name
 
        other_scm_instance = c.other_repo.scm_instance
 
        if org_scm_instance.alias == 'hg' and c.other_ref_name != 'ancestor':
 
            if c.org_ref_type != 'branch':
 
                c.org_branch_name = org_scm_instance.get_changeset(c.org_ref_name).branch # use ref_type ?
 
            other_branch_name = c.other_ref_name
 
            if c.other_ref_type != 'branch':
 
                other_branch_name = other_scm_instance.get_changeset(c.other_ref_name).branch # use ref_type ?
 
            # candidates: descendants of old head that are on the right branch
 
            #             and not are the old head itself ...
 
            #             and nothing at all if old head is a descendent of target ref name
 
            if other_scm_instance._repo.revs('present(%s)::&%s', c.cs_ranges[-1].raw_id, other_branch_name):
 
                c.update_msg = _('This pull request has already been merged to %s.') % other_branch_name
 
            else: # look for children of PR head on source branch in org repo
 
                arevs = org_scm_instance._repo.revs('%s:: & branch(%s) - %s',
 
                                                    revs[0], c.org_branch_name, revs[0])
 
                if arevs:
 
                    if c.pull_request.is_closed():
 
                        c.update_msg = _('This pull request has been closed and can not be updated with descendent changes on %s:') % c.org_branch_name
 
                    else:
 
                        c.update_msg = _('This pull request can be updated with descendent changes on %s:') % c.org_branch_name
 
                    c.available = [org_scm_instance.get_changeset(x) for x in arevs]
 
                else:
 
                    c.update_msg = _('No changesets found for updating this pull request.')
 

	
 
            revs = org_scm_instance._repo.revs('head() & not (%s::) & branch(%s) & !closed()', revs[0], c.org_branch_name)
 
            if revs:
 
                c.update_msg_other = _('Note: Branch %s also contains unrelated changes, such as %s.') % (c.org_branch_name,
 
                    h.short_id(org_scm_instance.get_changeset((max(revs))).raw_id))
 
            else:
 
                c.update_msg_other = _('Branch %s does not contain other changes.') % c.org_branch_name
 

	
 
        raw_ids = [x.raw_id for x in c.cs_ranges]
 
        c.cs_comments = c.org_repo.get_comments(raw_ids)
 
        c.statuses = c.org_repo.statuses(raw_ids)
 

	
 
        ignore_whitespace = request.GET.get('ignorews') == '1'
 
        line_context = request.GET.get('context', 3)
 
        c.ignorews_url = _ignorews_url
 
        c.context_url = _context_url
 
        c.fulldiff = request.GET.get('fulldiff')
 
        diff_limit = self.cut_off_limit if not c.fulldiff else None
 

	
 
        # we swap org/other ref since we run a simple diff on one repo
 
        log.debug('running diff between %s and %s in %s'
 
                  % (c.other_rev, c.org_rev, org_scm_instance.path))
 
        txtdiff = org_scm_instance.get_diff(rev1=safe_str(c.other_rev), rev2=safe_str(c.org_rev),
 
                                      ignore_whitespace=ignore_whitespace,
 
                                      context=line_context)
 

	
 
        diff_processor = diffs.DiffProcessor(txtdiff or '', format='gitdiff',
 
                                             diff_limit=diff_limit)
 
        _parsed = diff_processor.prepare()
 

	
 
        c.limited_diff = False
 
        if isinstance(_parsed, LimitedDiffContainer):
 
            c.limited_diff = True
 

	
 
        c.files = []
 
        c.changes = {}
 
        c.lines_added = 0
 
        c.lines_deleted = 0
 

	
 
        for f in _parsed:
 
            st = f['stats']
 
            c.lines_added += st['added']
 
            c.lines_deleted += st['deleted']
 
            fid = h.FID('', f['filename'])
 
            c.files.append([fid, f['operation'], f['filename'], f['stats']])
 
            htmldiff = diff_processor.as_html(enable_comments=enable_comments,
 
                                              parsed_lines=[f])
 
            c.changes[fid] = [f['operation'], f['filename'], htmldiff]
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def show_all(self, repo_name):
 
        c.from_ = request.GET.get('from_') or ''
 
        c.closed = request.GET.get('closed') or ''
 
        c.pull_requests = PullRequestModel().get_all(repo_name, from_=c.from_, closed=c.closed)
 
        c.repo_name = repo_name
 
        p = safe_int(request.GET.get('page', 1), 1)
 

	
 
        c.pullrequests_pager = Page(c.pull_requests, page=p, items_per_page=10)
 

	
 
        c.pullrequest_data = render('/pullrequests/pullrequest_data.html')
 

	
 
        if request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            return c.pullrequest_data
 

	
 
        return render('/pullrequests/pullrequest_show_all.html')
 

	
 
    @LoginRequired()
 
    def show_my(self): # my_account_my_pullrequests
 
        c.show_closed = request.GET.get('pr_show_closed')
 
        return render('/pullrequests/pullrequest_show_my.html')
 

	
 
    @NotAnonymous()
 
    def show_my_data(self):
 
        c.show_closed = request.GET.get('pr_show_closed')
 

	
 
        def _filter(pr):
 
            s = sorted(pr, key=lambda o: o.created_on, reverse=True)
 
            if not c.show_closed:
 
                s = filter(lambda p: p.status != PullRequest.STATUS_CLOSED, s)
 
            return s
 

	
 
        c.my_pull_requests = _filter(PullRequest.query()\
 
                                .filter(PullRequest.user_id ==
 
                                        self.authuser.user_id)\
 
                                .all())
 

	
 
        c.participate_in_pull_requests = _filter(PullRequest.query()\
 
                                .join(PullRequestReviewers)\
 
                                .filter(PullRequestReviewers.user_id ==
 
                                        self.authuser.user_id)\
 
                                                 )
 

	
 
        return render('/pullrequests/pullrequest_show_my_data.html')
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def index(self):
 
        org_repo = c.db_repo
 

	
 
        try:
 
            org_repo.scm_instance.get_changeset()
 
        except EmptyRepositoryError, e:
 
            h.flash(h.literal(_('There are no changesets yet')),
 
                    category='warning')
 
            redirect(url('summary_home', repo_name=org_repo.repo_name))
 

	
 
        org_rev = request.GET.get('rev_end')
 
        # rev_start is not directly useful - its parent could however be used
 
        # as default for other and thus give a simple compare view
 
        #other_rev = request.POST.get('rev_start')
 
        branch = request.GET.get('branch')
 

	
 
        c.org_repos = []
 
        c.org_repos.append((org_repo.repo_name, org_repo.repo_name))
 
        c.default_org_repo = org_repo.repo_name
 
        c.org_refs, c.default_org_ref = self._get_repo_refs(org_repo.scm_instance, rev=org_rev, branch=branch)
 

	
 
        c.other_repos = []
 
        other_repos_info = {}
 

	
 
        def add_other_repo(repo, branch_rev=None):
 
            if repo.repo_name in other_repos_info: # shouldn't happen
 
                return
 
            c.other_repos.append((repo.repo_name, repo.repo_name))
 
            other_refs, selected_other_ref = self._get_repo_refs(repo.scm_instance, branch_rev=branch_rev)
 
            other_repos_info[repo.repo_name] = {
 
                'user': dict(user_id=repo.user.user_id,
 
                             username=repo.user.username,
 
                             firstname=repo.user.firstname,
 
                             lastname=repo.user.lastname,
 
                             gravatar_link=h.gravatar_url(repo.user.email, 14)),
 
                'description': repo.description.split('\n', 1)[0],
 
                'revs': h.select('other_ref', selected_other_ref, other_refs, class_='refs')
 
            }
 

	
 
        # add org repo to other so we can open pull request against peer branches on itself
 
        add_other_repo(org_repo, branch_rev=org_rev)
 
        c.default_other_repo = org_repo.repo_name
 

	
 
        # gather forks and add to this list ... even though it is rare to
 
        # request forks to pull from their parent
 
        for fork in org_repo.forks:
 
            add_other_repo(fork)
 

	
 
        # add parents of this fork also, but only if it's not empty
 
        if org_repo.parent and org_repo.parent.scm_instance.revisions:
 
            add_other_repo(org_repo.parent)
 
            c.default_other_repo = org_repo.parent.repo_name
 

	
 
        c.default_other_repo_info = other_repos_info[c.default_other_repo]
 
        c.other_repos_info = json.dumps(other_repos_info)
 

	
 
        return render('/pullrequests/pullrequest.html')
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def create(self, repo_name):
 
        repo = RepoModel()._get_repo(repo_name)
 
        try:
 
            _form = PullRequestForm(repo.repo_id)().to_python(request.POST)
 
        except formencode.Invalid, errors:
 
            log.error(traceback.format_exc())
 
            log.error(str(errors))
 
            msg = _('Error creating pull request: %s') % errors.msg
 
            h.flash(msg, 'error')
 
            raise HTTPBadRequest
 

	
 
        # heads up: org and other might seem backward here ...
 
        org_repo_name = _form['org_repo']
 
        org_ref = _form['org_ref'] # will have merge_rev as rev but symbolic name
 
        org_repo = RepoModel()._get_repo(org_repo_name)
 
        (org_ref_type,
 
         org_ref_name,
 
         org_rev) = org_ref.split(':')
 

	
 
        other_repo_name = _form['other_repo']
 
        other_ref = _form['other_ref'] # will have symbolic name and head revision
 
        other_repo = RepoModel()._get_repo(other_repo_name)
 
        (other_ref_type,
 
         other_ref_name,
 
         other_rev) = other_ref.split(':')
 

	
 
        cs_ranges, _cs_ranges_not, ancestor_rev = \
 
            CompareController._get_changesets(org_repo.scm_instance.alias,
 
                                              other_repo.scm_instance, other_rev, # org and other "swapped"
 
                                              org_repo.scm_instance, org_rev,
 
                                              )
 
        revisions = [cs.raw_id for cs in cs_ranges]
 

	
 
        # hack: ancestor_rev is not an other_rev but we want to show the
 
        # requested destination and have the exact ancestor
 
        other_ref = '%s:%s:%s' % (other_ref_type, other_ref_name, ancestor_rev)
 

	
 
        reviewers = _form['review_members']
 

	
 
        title = _form['pullrequest_title']
 
        if not title:
 
            title = '%s#%s to %s#%s' % (org_repo_name, h.short_ref(org_ref_type, org_ref_name),
 
                                        other_repo_name, h.short_ref(other_ref_type, other_ref_name))
 
        description = _form['pullrequest_desc'].strip() or _('No description')
 
        try:
 
            pull_request = PullRequestModel().create(
 
                self.authuser.user_id, org_repo_name, org_ref, other_repo_name,
 
                other_ref, revisions, reviewers, title, description
 
            )
 
            Session().commit()
 
            h.flash(_('Successfully opened new pull request'),
 
                    category='success')
 
        except Exception:
 
            h.flash(_('Error occurred while creating pull request'),
 
                    category='error')
 
            log.error(traceback.format_exc())
 
            return redirect(url('pullrequest_home', repo_name=repo_name))
 

	
 
        return redirect(url('pullrequest_show', repo_name=other_repo_name,
 
                            pull_request_id=pull_request.pull_request_id))
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def copy_update(self, repo_name, pull_request_id):
 
        old_pull_request = PullRequest.get_or_404(pull_request_id)
 
        assert old_pull_request.other_repo.repo_name == repo_name
 
        if old_pull_request.is_closed():
 
            raise HTTPForbidden()
 

	
 
        org_repo = RepoModel()._get_repo(old_pull_request.org_repo.repo_name)
 
        org_ref_type, org_ref_name, org_rev = old_pull_request.org_ref.split(':')
 
        updaterev = request.POST.get('updaterev')
 
        if updaterev:
 
            new_org_rev = self._get_ref_rev(org_repo, 'rev', updaterev)
 
        else:
 
            # assert org_ref_type == 'branch', org_ref_type # TODO: what if not?
 
            new_org_rev = self._get_ref_rev(org_repo, org_ref_type, org_ref_name)
 

	
 
        other_repo = RepoModel()._get_repo(old_pull_request.other_repo.repo_name)
 
        other_ref_type, other_ref_name, other_rev = old_pull_request.other_ref.split(':') # other_rev is ancestor
 
        #assert other_ref_type == 'branch', other_ref_type # TODO: what if not?
 
        new_other_rev = self._get_ref_rev(other_repo, other_ref_type, other_ref_name)
 

	
 
        cs_ranges, _cs_ranges_not, ancestor_rev = CompareController._get_changesets(org_repo.scm_instance.alias,
 
            other_repo.scm_instance, new_other_rev, # org and other "swapped"
 
            org_repo.scm_instance, new_org_rev)
 

	
 
        old_revisions = set(old_pull_request.revisions)
 
        revisions = [cs.raw_id for cs in cs_ranges]
 
        new_revisions = [r for r in revisions if r not in old_revisions]
 
        lost = old_revisions.difference(revisions)
 

	
 
        infos = ['','', 'This is an update of %s "%s".' %
 
-                 (url('pullrequest_show', repo_name=old_pull_request.other_repo.repo_name,
-                      pull_request_id=pull_request_id, qualified=True),
+                 (h.canonical_url('pullrequest_show', repo_name=old_pull_request.other_repo.repo_name,
+                      pull_request_id=pull_request_id),
 
                  old_pull_request.title)]
 

	
 
        if lost:
 
            infos.append(_('Missing changesets since the previous version:'))
 
            for r in old_pull_request.revisions:
 
                if r in lost:
 
                    desc = org_repo.get_changeset(r).message.split('\n')[0]
 
                    infos.append('  %s "%s"' % (h.short_id(r), desc))
 

	
 
        if new_revisions:
 
            infos.append(_('New changesets on %s %s since the previous version:') % (org_ref_type, org_ref_name))
 
            for r in reversed(revisions):
 
                if r in new_revisions:
 
                    desc = org_repo.get_changeset(r).message.split('\n')[0]
 
                    infos.append('  %s "%s"' % (h.short_id(r), desc))
 

	
 
            if ancestor_rev == other_rev:
 
                infos.append(_("Ancestor didn't change - show diff since previous version: %s .") %
 
-                             url('compare_url',
+                             h.canonical_url('compare_url',
 
                                 repo_name=org_repo.repo_name, # other_repo is always same as repo_name
 
                                 org_ref_type='rev', org_ref_name=h.short_id(org_rev), # use old org_rev as base
 
                                 other_ref_type='rev', other_ref_name=h.short_id(new_org_rev),
 
-                                 qualified=True)) # note: linear diff, merge or not doesn't matter
+                                 )) # note: linear diff, merge or not doesn't matter
 
            else:
 
                infos.append(_('This pull request uses another merge ancestor than the previous version and they are not directly comparable.'))
 
        else:
 
           infos.append(_('No changes found on %s %s since previous version.') % (org_ref_type, org_ref_name))
 
           # TODO: fail?
 

	
 
        # hack: ancestor_rev is not an other_ref but we want to show the
 
        # requested destination and have the exact ancestor
 
        new_other_ref = '%s:%s:%s' % (other_ref_type, other_ref_name, ancestor_rev)
 
        new_org_ref = '%s:%s:%s' % (org_ref_type, org_ref_name, new_org_rev)
 

	
 
        reviewers = [r.user_id for r in old_pull_request.reviewers]
 
        try:
 
            old_title, old_v = re.match(r'(.*)\(v(\d+)\)\s*$', old_pull_request.title).groups()
 
            v = int(old_v) + 1
 
        except (AttributeError, ValueError):
 
            old_title = old_pull_request.title
 
            v = 2
 
        title = '%s (v%s)' % (old_title.strip(), v)
 
        description = (old_pull_request.description.rstrip() +
 
                       '\n'.join(infos))
 

	
 
        try:
 
            pull_request = PullRequestModel().create(
 
                self.authuser.user_id,
 
                old_pull_request.org_repo.repo_name, new_org_ref,
 
                old_pull_request.other_repo.repo_name, new_other_ref,
 
                revisions, reviewers, title, description
 
            )
 
        except Exception:
 
            h.flash(_('Error occurred while creating pull request'),
 
                    category='error')
 
            log.error(traceback.format_exc())
 
            return redirect(url('pullrequest_show', repo_name=repo_name,
 
                                pull_request_id=pull_request_id))
 

	
 
        ChangesetCommentsModel().create(
 
-            text=_('Closed, replaced by %s .') % url('pullrequest_show',
+            text=_('Closed, replaced by %s .') % h.canonical_url('pullrequest_show',
 
                                                   repo_name=old_pull_request.other_repo.repo_name,
 
-                                                   pull_request_id=pull_request.pull_request_id,
-                                                   qualified=True),
+                                                   pull_request_id=pull_request.pull_request_id),
 
            repo=old_pull_request.other_repo.repo_id,
 
            user=c.authuser.user_id,
 
            pull_request=pull_request_id,
 
            closing_pr=True)
 
        PullRequestModel().close_pull_request(pull_request_id)
 

	
 
        Session().commit()
 
        h.flash(_('Pull request update created'),
 
                category='success')
 

	
 
        return redirect(url('pullrequest_show', repo_name=old_pull_request.other_repo.repo_name,
 
                            pull_request_id=pull_request.pull_request_id))
 

	
 
    # pullrequest_post for PR editing
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def post(self, repo_name, pull_request_id):
 
        repo = RepoModel()._get_repo(repo_name)
 
        pull_request = PullRequest.get_or_404(pull_request_id)
 
        old_description = pull_request.description
 

	
 
        _form = PullRequestPostForm()().to_python(request.POST)
 

	
 
        pull_request.title = _form['pullrequest_title']
 
        pull_request.description = _form['pullrequest_desc'].strip() or _('No description')
 

	
 
        PullRequestModel().mention_from_description(pull_request, old_description)
 

	
 
        Session().commit()
 
        h.flash(_('Pull request updated'), category='success')
 

	
 
        return redirect(url('pullrequest_show', repo_name=repo.repo_name,
 
                            pull_request_id=pull_request_id))
 

	
 
    # pullrequest_update for updating reviewer list
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def update(self, repo_name, pull_request_id):
 
        pull_request = PullRequest.get_or_404(pull_request_id)
 
        if pull_request.is_closed():
 
            raise HTTPForbidden()
 
        #only owner or admin can update it
 
        owner = pull_request.author.user_id == c.authuser.user_id
 
        repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name)
 
        if h.HasPermissionAny('hg.admin') or repo_admin or owner:
 
            reviewers_ids = map(int, filter(lambda v: v not in [None, ''],
 
                request.POST.get('reviewers_ids', '').split(',')))
 

	
 
            PullRequestModel().update_reviewers(pull_request_id, reviewers_ids)
 
            Session().commit()
 
            return True
 
        raise HTTPForbidden()
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def delete(self, repo_name, pull_request_id):
 
        pull_request = PullRequest.get_or_404(pull_request_id)
 
        #only owner can delete it !
 
        if pull_request.author.user_id == c.authuser.user_id:
 
            PullRequestModel().delete(pull_request)
 
            Session().commit()
 
            h.flash(_('Successfully deleted pull request'),
 
                    category='success')
 
            return redirect(url('my_account_pullrequests'))
 
        raise HTTPForbidden()
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def show(self, repo_name, pull_request_id):
 
        repo_model = RepoModel()
 
        c.users_array = repo_model.get_users_js()
 
        c.user_groups_array = repo_model.get_user_groups_js()
 
        c.pull_request = PullRequest.get_or_404(pull_request_id)
 
        c.allowed_to_change_status = self._get_is_allowed_change_status(c.pull_request)
 
        cc_model = ChangesetCommentsModel()
 
        cs_model = ChangesetStatusModel()
 

	
 
        # pull_requests repo_name we opened it against
 
        # ie. other_repo must match
 
        if repo_name != c.pull_request.other_repo.repo_name:
 
            raise HTTPNotFound
 

	
 
        # load compare data into template context
 
        self._load_compare_data(c.pull_request, enable_comments=True)
 

	
 
        # inline comments
 
        c.inline_cnt = 0
 
        c.inline_comments = cc_model.get_inline_comments(
 
                                c.db_repo.repo_id,
 
                                pull_request=pull_request_id)
 
        # count inline comments
 
        for __, lines in c.inline_comments:
 
            for comments in lines.values():
 
                c.inline_cnt += len(comments)
 
        # comments
 
        c.comments = cc_model.get_comments(c.db_repo.repo_id,
 
                                           pull_request=pull_request_id)
 

	
 
        # (badly named) pull-request status calculation based on reviewer votes
 
        (c.pull_request_reviewers,
 
         c.pull_request_pending_reviewers,
 
         c.current_voting_result,
 
         ) = cs_model.calculate_pull_request_result(c.pull_request)
 
        c.changeset_statuses = ChangesetStatus.STATUSES
 

	
 
        c.as_form = False
 
        c.ancestor = None # there is one - but right here we don't know which
 
        return render('/pullrequests/pullrequest_show.html')
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def comment(self, repo_name, pull_request_id):
 
        pull_request = PullRequest.get_or_404(pull_request_id)
 

	
 
        status = request.POST.get('changeset_status')
 
        text = request.POST.get('text')
 
        close_pr = request.POST.get('save_close')
 

	
 
        allowed_to_change_status = self._get_is_allowed_change_status(pull_request)
 
        if status and allowed_to_change_status:
 
            _def = (_('Status change -> %s')
 
                            % ChangesetStatus.get_status_lbl(status))
 
            if close_pr:
 
                _def = _('Closing with') + ' ' + _def
 
            text = text or _def
 
        comm = ChangesetCommentsModel().create(
 
            text=text,
 
            repo=c.db_repo.repo_id,
 
            user=c.authuser.user_id,
 
            pull_request=pull_request_id,
 
            f_path=request.POST.get('f_path'),
 
            line_no=request.POST.get('line'),
 
            status_change=(ChangesetStatus.get_status_lbl(status)
 
                           if status and allowed_to_change_status else None),
 
            closing_pr=close_pr
 
        )
 

	
 
        action_logger(self.authuser,
 
                      'user_commented_pull_request:%s' % pull_request_id,
 
                      c.db_repo, self.ip_addr, self.sa)
 

	
 
        if allowed_to_change_status:
 
            # get status if set !
 
            if status:
 
                ChangesetStatusModel().set_status(
 
                    c.db_repo.repo_id,
 
                    status,
 
                    c.authuser.user_id,
 
                    comm,
 
                    pull_request=pull_request_id
 
                )
 

	
 
            if close_pr:
 
                PullRequestModel().close_pull_request(pull_request_id)
 
                action_logger(self.authuser,
 
                              'user_closed_pull_request:%s' % pull_request_id,
 
                              c.db_repo, self.ip_addr, self.sa)
 

	
 
        Session().commit()
 

	
 
        if not request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            return redirect(h.url('pullrequest_show', repo_name=repo_name,
 
                                  pull_request_id=pull_request_id))
 

	
 
        data = {
 
           'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
 
        }
 
        if comm:
 
            c.co = comm
 
            data.update(comm.get_dict())
 
            data.update({'rendered_text':
 
                         render('changeset/changeset_comment_block.html')})
 

	
 
        return data
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def delete_comment(self, repo_name, comment_id):
 
        co = ChangesetComment.get(comment_id)
 
        if co.pull_request.is_closed():
 
            #don't allow deleting comments on closed pull request
 
            raise HTTPForbidden()
 

	
 
        owner = co.author.user_id == c.authuser.user_id
 
        repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name)
 
        if h.HasPermissionAny('hg.admin') or repo_admin or owner:
 
            ChangesetCommentsModel().delete(comment=co)
 
            Session().commit()
 
            return True
 
        else:
 
            raise HTTPForbidden()
kallithea/lib/dbmigrate/schema/db_1_5_2.py
Show inline comments
 
@@ -171,1540 +171,1540 @@ class Setting(Base, BaseModel):
 
    @hybrid_property
 
    def app_settings_value(self):
 
        v = self._app_settings_value
 
        if self.app_settings_name in ["ldap_active",
 
                                      "default_repo_enable_statistics",
 
                                      "default_repo_enable_locking",
 
                                      "default_repo_private",
 
                                      "default_repo_enable_downloads"]:
 
            v = str2bool(v)
 
        return v
 

	
 
    @app_settings_value.setter
 
    def app_settings_value(self, val):
 
        """
 
        Setter that will always make sure we use unicode in app_settings_value
 

	
 
        :param val:
 
        """
 
        self._app_settings_value = safe_unicode(val)
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (
 
            self.__class__.__name__,
 
            self.app_settings_name, self.app_settings_value
 
        )
 

	
 
    @classmethod
 
    def get_by_name(cls, key):
 
        return cls.query()\
 
            .filter(cls.app_settings_name == key).scalar()
 

	
 
    @classmethod
 
    def get_by_name_or_create(cls, key):
 
        res = cls.get_by_name(key)
 
        if not res:
 
            res = cls(key)
 
        return res
 

	
 
    @classmethod
 
    def get_app_settings(cls, cache=False):
 

	
 
        ret = cls.query()
 

	
 
        if cache:
 
            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
 

	
 
        if not ret:
 
            raise Exception('Could not get application settings !')
 
        settings = {}
 
        for each in ret:
 
            settings[each.app_settings_name] = \
 
                each.app_settings_value
 

	
 
        return settings
 

	
 
    @classmethod
 
    def get_ldap_settings(cls, cache=False):
 
        ret = cls.query()\
 
                .filter(cls.app_settings_name.startswith('ldap_')).all()
 
        fd = {}
 
        for row in ret:
 
            fd.update({row.app_settings_name: row.app_settings_value})
 

	
 
        return fd
 

	
 
    @classmethod
 
    def get_default_repo_settings(cls, cache=False, strip_prefix=False):
 
        ret = cls.query()\
 
                .filter(cls.app_settings_name.startswith('default_')).all()
 
        fd = {}
 
        for row in ret:
 
            key = row.app_settings_name
 
            if strip_prefix:
 
                key = remove_prefix(key, prefix='default_')
 
            fd.update({key: row.app_settings_value})
 

	
 
        return fd
 

	
 

	
 
class Ui(Base, BaseModel):
 
    __tablename__ = DB_PREFIX + 'ui'
 
    __table_args__ = (
 
        UniqueConstraint('ui_key'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 

	
 
    HOOK_UPDATE = 'changegroup.update'
 
    HOOK_REPO_SIZE = 'changegroup.repo_size'
 
    HOOK_PUSH = 'changegroup.push_logger'
 
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
 
    HOOK_PULL = 'outgoing.pull_logger'
 
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
 

	
 
    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    ui_section = Column("ui_section", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    ui_key = Column("ui_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    ui_value = Column("ui_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
 

	
 
    @classmethod
 
    def get_by_key(cls, key):
 
        return cls.query().filter(cls.ui_key == key).scalar()
 

	
 
    @classmethod
 
    def get_builtin_hooks(cls):
 
        q = cls.query()
 
        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
 
                                     cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
 
                                     cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
 
        return q.all()
 

	
 
    @classmethod
 
    def get_custom_hooks(cls):
 
        q = cls.query()
 
        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
 
                                      cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
 
                                      cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
 
        q = q.filter(cls.ui_section == 'hooks')
 
        return q.all()
 

	
 
    @classmethod
 
    def get_repos_location(cls):
 
        return cls.get_by_key('/').ui_value
 

	
 
    @classmethod
 
    def create_or_update_hook(cls, key, val):
 
        new_ui = cls.get_by_key(key) or cls()
 
        new_ui.ui_section = 'hooks'
 
        new_ui.ui_active = True
 
        new_ui.ui_key = key
 
        new_ui.ui_value = val
 

	
 
        Session().add(new_ui)
 

	
 
    def __repr__(self):
 
        return '<DB:%s[%s:%s]>' % (self.__class__.__name__, self.ui_key,
 
                                   self.ui_value)
 

	
 

	
 
class User(Base, BaseModel):
 
    __tablename__ = 'users'
 
    __table_args__ = (
 
        UniqueConstraint('username'), UniqueConstraint('email'),
 
        Index('u_username_idx', 'username'),
 
        Index('u_email_idx', 'email'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    DEFAULT_USER = 'default'
 
    DEFAULT_PERMISSIONS = [
 
        'hg.register.manual_activate', 'hg.create.repository',
 
        'hg.fork.repository', 'repository.read', 'group.read'
 
    ]
 
    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    password = Column("password", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
 
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
 
    name = Column("firstname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    lastname = Column("lastname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
 
    ldap_dn = Column("ldap_dn", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    api_key = Column("api_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
 

	
 
    user_log = relationship('UserLog')
 
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
 

	
 
    repositories = relationship('Repository')
 
    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
 
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
 

	
 
    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
 
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
 

	
 
    group_member = relationship('UserGroupMember', cascade='all')
 

	
 
    notifications = relationship('UserNotification', cascade='all')
 
    # notifications assigned to this user
 
    user_created_notifications = relationship('Notification', cascade='all')
 
    # comments created by this user
 
    user_comments = relationship('ChangesetComment', cascade='all')
 
    #extra emails for this user
 
    user_emails = relationship('UserEmailMap', cascade='all')
 

	
 
    @hybrid_property
 
    def email(self):
 
        return self._email
 

	
 
    @email.setter
 
    def email(self, val):
 
        self._email = val.lower() if val else None
 

	
 
    @property
 
    def firstname(self):
 
        # alias for future
 
        return self.name
 

	
 
    @property
 
    def emails(self):
 
        other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
 
        return [self.email] + [x.email for x in other]
 

	
 
    @property
 
    def ip_addresses(self):
 
        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
 
        return [x.ip_addr for x in ret]
 

	
 
    @property
 
    def username_and_name(self):
 
        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
 

	
 
    @property
 
    def full_name(self):
 
        return '%s %s' % (self.firstname, self.lastname)
 

	
 
    @property
 
    def full_name_or_username(self):
 
        return ('%s %s' % (self.firstname, self.lastname)
 
                if (self.firstname and self.lastname) else self.username)
 

	
 
    @property
 
    def full_contact(self):
 
        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
 

	
 
    @property
 
    def short_contact(self):
 
        return '%s %s' % (self.firstname, self.lastname)
 

	
 
    @property
 
    def is_admin(self):
 
        return self.admin
 

	
 
    def __unicode__(self):
 
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
 
                                     self.user_id, self.username)
 

	
 
    @classmethod
 
    def get_by_username(cls, username, case_insensitive=False, cache=False):
 
        if case_insensitive:
 
            q = cls.query().filter(cls.username.ilike(username))
 
        else:
 
            q = cls.query().filter(cls.username == username)
 

	
 
        if cache:
 
            q = q.options(FromCache(
 
                            "sql_cache_short",
 
                            "get_user_%s" % _hash_key(username)
 
                          )
 
            )
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_api_key(cls, api_key, cache=False):
 
        q = cls.query().filter(cls.api_key == api_key)
 

	
 
        if cache:
 
            q = q.options(FromCache("sql_cache_short",
 
                                    "get_api_key_%s" % api_key))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_email(cls, email, case_insensitive=False, cache=False):
 
        if case_insensitive:
 
            q = cls.query().filter(cls.email.ilike(email))
 
        else:
 
            q = cls.query().filter(cls.email == email)
 

	
 
        if cache:
 
            q = q.options(FromCache("sql_cache_short",
 
                                    "get_email_key_%s" % email))
 

	
 
        ret = q.scalar()
 
        if ret is None:
 
            q = UserEmailMap.query()
 
            # try fetching in alternate email map
 
            if case_insensitive:
 
                q = q.filter(UserEmailMap.email.ilike(email))
 
            else:
 
                q = q.filter(UserEmailMap.email == email)
 
            q = q.options(joinedload(UserEmailMap.user))
 
            if cache:
 
                q = q.options(FromCache("sql_cache_short",
 
                                        "get_email_map_key_%s" % email))
 
            ret = getattr(q.scalar(), 'user', None)
 

	
 
        return ret
 

	
 
    def update_lastlogin(self):
 
        """Update user lastlogin"""
 
        self.last_login = datetime.datetime.now()
 
        Session().add(self)
 
        log.debug('updated user %s lastlogin' % self.username)
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating user related data for API
 
        """
 
        user = self
 
        data = dict(
 
            user_id=user.user_id,
 
            username=user.username,
 
            firstname=user.name,
 
            lastname=user.lastname,
 
            email=user.email,
 
            emails=user.emails,
 
            api_key=user.api_key,
 
            active=user.active,
 
            admin=user.admin,
 
            ldap_dn=user.ldap_dn,
 
            last_login=user.last_login,
 
            ip_addresses=user.ip_addresses
 
        )
 
        return data
 

	
 
    def __json__(self):
 
        data = dict(
 
            full_name=self.full_name,
 
            full_name_or_username=self.full_name_or_username,
 
            short_contact=self.short_contact,
 
            full_contact=self.full_contact
 
        )
 
        data.update(self.get_api_data())
 
        return data
 

	
 

	
 
class UserEmailMap(Base, BaseModel):
 
    __tablename__ = 'user_email_map'
 
    __table_args__ = (
 
        Index('uem_email_idx', 'email'),
 
        UniqueConstraint('email'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    __mapper_args__ = {}
 

	
 
    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
 
    user = relationship('User', lazy='joined')
 

	
 
    @validates('_email')
 
    def validate_email(self, key, email):
 
        # check if this email is not main one
 
        main_email = Session().query(User).filter(User.email == email).scalar()
 
        if main_email is not None:
 
            raise AttributeError('email %s is present in user table' % email)
 
        return email
 

	
 
    @hybrid_property
 
    def email(self):
 
        return self._email
 

	
 
    @email.setter
 
    def email(self, val):
 
        self._email = val.lower() if val else None
 

	
 

	
 
class UserIpMap(Base, BaseModel):
 
    __tablename__ = 'user_ip_map'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'ip_addr'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    __mapper_args__ = {}
 

	
 
    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    ip_addr = Column("ip_addr", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
 
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
 
    user = relationship('User', lazy='joined')
 

	
 
    @classmethod
 
    def _get_ip_range(cls, ip_addr):
 
        from kallithea.lib import ipaddr
 
        net = ipaddr.IPv4Network(ip_addr)
 
        return [str(net.network), str(net.broadcast)]
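    # Editor's note -- illustrative only, not part of this changeset: with the
    # bundled ipaddr module, _get_ip_range() returns the first and last address
    # of the given network as strings; the exact values below are an assumption
    # about the ipaddr API behaviour, e.g.:
    #
    #   >>> UserIpMap._get_ip_range('192.168.1.0/24')
    #   ['192.168.1.0', '192.168.1.255']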
 

	
 
    def __json__(self):
 
        return dict(
 
          ip_addr=self.ip_addr,
 
          ip_range=self._get_ip_range(self.ip_addr)
 
        )
 

	
 

	
 
class UserLog(Base, BaseModel):
 
    __tablename__ = 'user_logs'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
 
    repository_name = Column("repository_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    user_ip = Column("user_ip", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    action = Column("action", UnicodeText(1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
 

	
 
    @property
 
    def action_as_day(self):
 
        return datetime.date(*self.action_date.timetuple()[:3])
 

	
 
    user = relationship('User')
 
    repository = relationship('Repository', cascade='')
 

	
 

	
 
class UserGroup(Base, BaseModel):
 
    __tablename__ = 'users_groups'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 

	
 
    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_name = Column("users_group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
 
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
 
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
 

	
 
    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
 
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
 
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
 

	
 
    def __unicode__(self):
 
        return u'<userGroup(%s)>' % (self.users_group_name)
 

	
 
    @classmethod
 
    def get_by_group_name(cls, group_name, cache=False,
 
                          case_insensitive=False):
 
        if case_insensitive:
 
            q = cls.query().filter(cls.users_group_name.ilike(group_name))
 
        else:
 
            q = cls.query().filter(cls.users_group_name == group_name)
 
        if cache:
 
            q = q.options(FromCache(
 
                            "sql_cache_short",
 
                            "get_user_%s" % _hash_key(group_name)
 
                          )
 
            )
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get(cls, users_group_id, cache=False):
 
        users_group = cls.query()
 
        if cache:
 
            users_group = users_group.options(FromCache("sql_cache_short",
 
                                    "get_users_group_%s" % users_group_id))
 
        return users_group.get(users_group_id)
 

	
 
    def get_api_data(self):
 
        users_group = self
 

	
 
        data = dict(
 
            users_group_id=users_group.users_group_id,
 
            group_name=users_group.users_group_name,
 
            active=users_group.users_group_active,
 
        )
 

	
 
        return data
 

	
 

	
 
class UserGroupMember(Base, BaseModel):
 
    __tablename__ = 'users_groups_members'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 

	
 
    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User', lazy='joined')
 
    users_group = relationship('UserGroup')
 

	
 
    def __init__(self, gr_id='', u_id=''):
 
        self.users_group_id = gr_id
 
        self.user_id = u_id
 

	
 

	
 
class Repository(Base, BaseModel):
 
    __tablename__ = 'repositories'
 
    __table_args__ = (
 
        UniqueConstraint('repo_name'),
 
        Index('r_repo_name_idx', 'repo_name'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 

	
 
    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    repo_name = Column("repo_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
 
    clone_uri = Column("clone_uri", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
 
    repo_type = Column("repo_type", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
 
    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
 
    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
 
    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
 
    description = Column("description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
 
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
 
    landing_rev = Column("landing_revision", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
 
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
 
    _locked = Column("locked", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
 
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) #JSON data
 

	
 
    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
 
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
 

	
 
    user = relationship('User')
 
    fork = relationship('Repository', remote_side=repo_id)
 
    group = relationship('RepoGroup')
 
    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
 
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
 
    stats = relationship('Statistics', cascade='all', uselist=False)
 

	
 
    followers = relationship('UserFollowing',
 
                             primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
 
                             cascade='all')
 

	
 
    logs = relationship('UserLog')
 
    comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan")
 

	
 
    pull_requests_org = relationship('PullRequest',
 
                    primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
 
                    cascade="all, delete, delete-orphan")
 

	
 
    pull_requests_other = relationship('PullRequest',
 
                    primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
 
                    cascade="all, delete, delete-orphan")
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
 
                                   safe_unicode(self.repo_name))
 

	
 
    @hybrid_property
 
    def locked(self):
 
        # should always return [user_id, timelocked]
 
        if self._locked:
 
            _lock_info = self._locked.split(':')
 
            return int(_lock_info[0]), _lock_info[1]
 
        return [None, None]
 

	
 
    @locked.setter
 
    def locked(self, val):
 
        if val and isinstance(val, (list, tuple)):
 
            self._locked = ':'.join(map(str, val))
 
        else:
 
            self._locked = None
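    # Editor's note -- illustrative only, not part of this changeset: the lock
    # is persisted as a 'user_id:timestamp' string and read back as a
    # (user_id, timestamp) pair, e.g.:
    #
    #   >>> repo.locked = [2, 1407851234.56]   # stored as '2:1407851234.56'
    #   >>> repo.locked
    #   (2, '1407851234.56')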
 

	
 
    @hybrid_property
 
    def changeset_cache(self):
 
        from kallithea.lib.vcs.backends.base import EmptyChangeset
 
        dummy = EmptyChangeset().__json__()
 
        if not self._changeset_cache:
 
            return dummy
 
        try:
 
            return json.loads(self._changeset_cache)
 
        except TypeError:
 
            return dummy
 

	
 
    @changeset_cache.setter
 
    def changeset_cache(self, val):
 
        try:
 
            self._changeset_cache = json.dumps(val)
 
        except:
 
            log.error(traceback.format_exc())
 

	
 
    @classmethod
 
    def url_sep(cls):
 
        return URL_SEP
 

	
 
    @classmethod
 
    def normalize_repo_name(cls, repo_name):
 
        """
 
        Normalizes os specific repo_name to the format internally stored inside
 
        database using URL_SEP
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        return cls.url_sep().join(repo_name.split(os.sep))
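    # Editor's note -- illustrative only, not part of this changeset: on a
    # platform where os.sep is '\\', the normalization would give e.g.
    #
    #   >>> Repository.normalize_repo_name('group\\repo')
    #   'group/repo'
    #
    # assuming URL_SEP is '/'.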
 

	
 
    @classmethod
 
    def get_by_repo_name(cls, repo_name):
 
        q = Session().query(cls).filter(cls.repo_name == repo_name)
 
        q = q.options(joinedload(Repository.fork))\
 
                .options(joinedload(Repository.user))\
 
                .options(joinedload(Repository.group))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_full_path(cls, repo_full_path):
 
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
 
        repo_name = cls.normalize_repo_name(repo_name)
 
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
 

	
 
    @classmethod
 
    def get_repo_forks(cls, repo_id):
 
        return cls.query().filter(Repository.fork_id == repo_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns base path where all repos are stored
 

	
 
        :param cls:
 
        """
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == cls.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def forks(self):
 
        """
 
        Return forks of this repo
 
        """
 
        return Repository.get_repo_forks(self.repo_id)
 

	
 
    @property
 
    def parent(self):
 
        """
 
        Returns fork parent
 
        """
 
        return self.fork
 

	
 
    @property
 
    def just_name(self):
 
        return self.repo_name.split(Repository.url_sep())[-1]
 

	
 
    @property
 
    def groups_with_parents(self):
 
        groups = []
 
        if self.group is None:
 
            return groups
 

	
 
        cur_gr = self.group
 
        groups.insert(0, cur_gr)
 
        while 1:
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            groups.insert(0, gr)
 

	
 
        return groups
 

	
 
    @property
 
    def groups_and_repo(self):
 
        return self.groups_with_parents, self.just_name
 

	
 
    @LazyProperty
 
    def repo_path(self):
 
        """
 
        Returns the full base path for this repository, i.e. where it actually
 
        exists on the filesystem
 
        """
 
        q = Session().query(Ui).filter(Ui.ui_key ==
 
                                              Repository.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def repo_full_path(self):
 
        p = [self.repo_path]
 
        # we need to split the name by / since this is how we store the
 
        # names in the database, but that eventually needs to be converted
 
        # into a valid system path
 
        p += self.repo_name.split(Repository.url_sep())
 
        return os.path.join(*p)
 

	
 
    @property
 
    def cache_keys(self):
 
        """
 
        Returns associated cache keys for that repo
 
        """
 
        return CacheInvalidation.query()\
 
            .filter(CacheInvalidation.cache_args == self.repo_name)\
 
            .order_by(CacheInvalidation.cache_key)\
 
            .all()
 

	
 
    def get_new_name(self, repo_name):
 
        """
 
        returns new full repository name based on assigned group and new name
 

	
 
        :param repo_name:
 
        """
 
        path_prefix = self.group.full_path_splitted if self.group else []
 
        return Repository.url_sep().join(path_prefix + [repo_name])
 

	
 
    @property
 
    def _ui(self):
 
        """
 
        Creates a db based ui object for this repository
 
        """
 
        from kallithea.lib.utils import make_ui
 
        return make_ui('db', clear_session=False)
 

	
 
    @classmethod
 
    def inject_ui(cls, repo, extras={}):
 
        from kallithea.lib.vcs.backends.hg import MercurialRepository
 
        from kallithea.lib.vcs.backends.git import GitRepository
 
        required = (MercurialRepository, GitRepository)
 
        if not isinstance(repo, required):
 
            raise Exception('repo must be instance of %s' % ', '.join(x.__name__ for x in required))
 

	
 
        # inject ui extra param to log this action via push logger
 
        for k, v in extras.items():
 
            repo._repo.ui.setconfig('extras', k, v)
 

	
 
    @classmethod
 
    def is_valid(cls, repo_name):
 
        """
 
        returns True if given repo name is a valid filesystem repository
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        from kallithea.lib.utils import is_valid_repo
 

	
 
        return is_valid_repo(repo_name, cls.base_path())
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating repo api data
 

	
 
        """
 
        repo = self
 
        data = dict(
 
            repo_id=repo.repo_id,
 
            repo_name=repo.repo_name,
 
            repo_type=repo.repo_type,
 
            clone_uri=repo.clone_uri,
 
            private=repo.private,
 
            created_on=repo.created_on,
 
            description=repo.description,
 
            landing_rev=repo.landing_rev,
 
            owner=repo.user.username,
 
            fork_of=repo.fork.repo_name if repo.fork else None,
 
            enable_statistics=repo.enable_statistics,
 
            enable_locking=repo.enable_locking,
 
            enable_downloads=repo.enable_downloads,
 
            last_changeset=repo.changeset_cache
 
        )
 

	
 
        return data
 

	
 
    @classmethod
 
    def lock(cls, repo, user_id):
 
        repo.locked = [user_id, time.time()]
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def unlock(cls, repo):
 
        repo.locked = None
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @property
 
    def last_db_change(self):
 
        return self.updated_on
 

	
 
    def clone_url(self, **override):
 
        from pylons import url
 
        import kallithea.lib.helpers as h
 
        from urlparse import urlparse
 
        import urllib
 
        parsed_url = urlparse(url('home', qualified=True))
 
        parsed_url = urlparse(h.canonical_url('home'))
 
        default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
 
        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
 
        args = {
 
           'user': '',
 
           'pass': '',
 
           'scheme': parsed_url.scheme,
 
           'netloc': parsed_url.netloc,
 
           'prefix': decoded_path,
 
           'path': self.repo_name
 
        }
 

	
 
        args.update(override)
 
        return default_clone_uri % args
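    # Editor's note -- illustrative only, not part of this changeset: with the
    # canonical_url setting this changeset introduces, scheme/host/prefix come
    # from h.canonical_url('home') instead of the current request URL, so the
    # substitution would yield e.g. (hostname is a made-up example):
    #
    #   >>> '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s' % {
    #   ...     'scheme': 'https', 'user': '', 'pass': '',
    #   ...     'netloc': 'kallithea.example.com', 'prefix': '/',
    #   ...     'path': 'myrepo'}
    #   'https://kallithea.example.com/myrepo'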
 

	
 
    #==========================================================================
 
    # SCM PROPERTIES
 
    #==========================================================================
 

	
 
    def get_changeset(self, rev=None):
 
        return get_changeset_safe(self.scm_instance, rev)
 

	
 
    def get_landing_changeset(self):
 
        """
 
        Returns landing changeset, or if that doesn't exist returns the tip
 
        """
 
        cs = self.get_changeset(self.landing_rev) or self.get_changeset()
 
        return cs
 

	
 
    def update_changeset_cache(self, cs_cache=None):
 
        """
 
        Update cache of last changeset for repository, keys should be::
 

	
 
            short_id
 
            raw_id
 
            revision
 
            message
 
            date
 
            author
 

	
 
        :param cs_cache:
 
        """
 
        from kallithea.lib.vcs.backends.base import BaseChangeset
 
        if cs_cache is None:
 
            cs_cache = EmptyChangeset()
 
            # use no-cache version here
 
            scm_repo = self.scm_instance_no_cache()
 
            if scm_repo:
 
                cs_cache = scm_repo.get_changeset()
 

	
 
        if isinstance(cs_cache, BaseChangeset):
 
            cs_cache = cs_cache.__json__()
 

	
 
        if (cs_cache != self.changeset_cache or not self.changeset_cache):
 
            _default = datetime.datetime.fromtimestamp(0)
 
            last_change = cs_cache.get('date') or _default
 
            log.debug('updated repo %s with new cs cache %s'
 
                      % (self.repo_name, cs_cache))
 
            self.updated_on = last_change
 
            self.changeset_cache = cs_cache
 
            Session().add(self)
 
            Session().commit()
 
        else:
 
            log.debug('Skipping repo:%s already with latest changes'
 
                      % self.repo_name)
 

	
 
    @property
 
    def tip(self):
 
        return self.get_changeset('tip')
 

	
 
    @property
 
    def author(self):
 
        return self.tip.author
 

	
 
    @property
 
    def last_change(self):
 
        return self.scm_instance.last_change
 

	
 
    def get_comments(self, revisions=None):
 
        """
 
        Returns comments for this repository grouped by revisions
 

	
 
        :param revisions: filter query by revisions only
 
        """
 
        cmts = ChangesetComment.query()\
 
            .filter(ChangesetComment.repo == self)
 
        if revisions:
 
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
 
        grouped = defaultdict(list)
 
        for cmt in cmts.all():
 
            grouped[cmt.revision].append(cmt)
 
        return grouped
 

	
 
    def statuses(self, revisions=None):
 
        """
 
        Returns statuses for this repository
 

	
 
        :param revisions: list of revisions to get statuses for
 
        :type revisions: list
 
        """
 

	
 
        statuses = ChangesetStatus.query()\
 
            .filter(ChangesetStatus.repo == self)\
 
            .filter(ChangesetStatus.version == 0)
 
        if revisions:
 
            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
 
        grouped = {}
 

	
 
        # maybe we have an open new pull request without a status?
 
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
 
        status_lbl = ChangesetStatus.get_status_lbl(stat)
 
        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
 
            for rev in pr.revisions:
 
                pr_id = pr.pull_request_id
 
                pr_repo = pr.other_repo.repo_name
 
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
 

	
 
        for stat in statuses.all():
 
            pr_id = pr_repo = None
 
            if stat.pull_request:
 
                pr_id = stat.pull_request.pull_request_id
 
                pr_repo = stat.pull_request.other_repo.repo_name
 
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
 
                                      pr_id, pr_repo]
 
        return grouped
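    # Editor's note -- illustrative only, not part of this changeset: the
    # mapping returned by statuses() is keyed by revision and holds
    # [status, status label, pull request id, other repo name]; the status
    # name/label values below are assumptions, not taken from this file:
    #
    #   {'9f3e...': ['under_review', 'Under Review', 4, 'group/other-repo']}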
 

	
 
    #==========================================================================
 
    # SCM CACHE INSTANCE
 
    #==========================================================================
 

	
 
    @property
 
    def invalidate(self):
 
        return CacheInvalidation.invalidate(self.repo_name)
 

	
 
    def set_invalidate(self):
 
        """
 
        set a cache for invalidation for this instance
 
        """
 
        CacheInvalidation.set_invalidate(repo_name=self.repo_name)
 

	
 
    def scm_instance_no_cache(self):
 
        return self.__get_instance()
 

	
 
    @LazyProperty
 
    def scm_instance(self):
 
        import kallithea
 
        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
 
        if full_cache:
 
            return self.scm_instance_cached()
 
        return self.__get_instance()
 

	
 
    def scm_instance_cached(self, cache_map=None):
 
        @cache_region('long_term')
 
        def _c(repo_name):
 
            return self.__get_instance()
 
        rn = self.repo_name
 
        log.debug('Getting cached instance of repo')
 

	
 
        if cache_map:
 
            # get using prefilled cache_map
 
            invalidate_repo = cache_map[self.repo_name]
 
            if invalidate_repo:
 
                invalidate_repo = (None if invalidate_repo.cache_active
 
                                   else invalidate_repo)
 
        else:
 
            # get from invalidate
 
            invalidate_repo = self.invalidate
 

	
 
        if invalidate_repo is not None:
 
            region_invalidate(_c, None, rn)
 
            # update our cache
 
            CacheInvalidation.set_valid(invalidate_repo.cache_key)
 
        return _c(rn)
 

	
 
    def __get_instance(self):
 
        repo_full_path = self.repo_full_path
 
        try:
 
            alias = get_scm(repo_full_path)[0]
 
            log.debug('Creating instance of %s repository' % alias)
 
            backend = get_backend(alias)
 
        except VCSError:
 
            log.error(traceback.format_exc())
 
            log.error('Perhaps this repository is in db and not in '
 
                      'filesystem; run rescan repositories with '
 
                      '"destroy old data " option from admin panel')
 
            return
 

	
 
        if alias == 'hg':
 

	
 
            repo = backend(safe_str(repo_full_path), create=False,
 
                           baseui=self._ui)
 
            # skip hidden web repository
 
            if repo._get_hidden():
 
                return
 
        else:
 
            repo = backend(repo_full_path, create=False)
 

	
 
        return repo
 

	
 

	
 
class RepoGroup(Base, BaseModel):
 
    __tablename__ = 'groups'
 
    __table_args__ = (
 
        UniqueConstraint('group_name', 'group_parent_id'),
 
        CheckConstraint('group_id != group_parent_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    __mapper_args__ = {'order_by': 'group_name'}
 

	
 
    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    group_name = Column("group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
 
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
 
    group_description = Column("group_description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
 

	
 
    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
 
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
 

	
 
    parent_group = relationship('RepoGroup', remote_side=group_id)
 

	
 
    def __init__(self, group_name='', parent_group=None):
 
        self.group_name = group_name
 
        self.parent_group = parent_group
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
 
                                  self.group_name)
 

	
 
    @classmethod
 
    def groups_choices(cls, check_perms=False):
 
        from webhelpers.html import literal as _literal
 
        from kallithea.model.scm import ScmModel
 
        groups = cls.query().all()
 
        if check_perms:
 
            # filter to groups the user has access to; it's done
 
            # magically inside ScmModel based on the current user
 
            groups = ScmModel().get_repos_groups(groups)
 
        repo_groups = [('', '')]
 
        sep = ' &raquo; '
 
        _name = lambda k: _literal(sep.join(k))
 

	
 
        repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
 
                              for x in groups])
 

	
 
        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
 
        return repo_groups
 

	
 
    @classmethod
 
    def url_sep(cls):
 
        return URL_SEP
 

	
 
    @classmethod
 
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
 
        if case_insensitive:
 
            gr = cls.query()\
 
                .filter(cls.group_name.ilike(group_name))
 
        else:
 
            gr = cls.query()\
 
                .filter(cls.group_name == group_name)
 
        if cache:
 
            gr = gr.options(FromCache(
 
                            "sql_cache_short",
 
                            "get_group_%s" % _hash_key(group_name)
 
                            )
 
            )
 
        return gr.scalar()
 

	
 
    @property
 
    def parents(self):
 
        parents_recursion_limit = 5
 
        groups = []
 
        if self.parent_group is None:
 
            return groups
 
        cur_gr = self.parent_group
 
        groups.insert(0, cur_gr)
 
        cnt = 0
 
        while 1:
 
            cnt += 1
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            if cnt == parents_recursion_limit:
 
                # this will prevent accidental infinite loops
 
                log.error('group nested more than %s' %
 
                          parents_recursion_limit)
 
                break
 

	
 
            groups.insert(0, gr)
 
        return groups
 

	
 
    @property
 
    def children(self):
 
        return RepoGroup.query().filter(RepoGroup.parent_group == self)
 

	
 
    @property
 
    def name(self):
 
        return self.group_name.split(RepoGroup.url_sep())[-1]
 

	
 
    @property
 
    def full_path(self):
 
        return self.group_name
 

	
 
    @property
 
    def full_path_splitted(self):
 
        return self.group_name.split(RepoGroup.url_sep())
 

	
 
    @property
 
    def repositories(self):
 
        return Repository.query()\
 
                .filter(Repository.group == self)\
 
                .order_by(Repository.repo_name)
 

	
 
    @property
 
    def repositories_recursive_count(self):
 
        cnt = self.repositories.count()
 

	
 
        def children_count(group):
 
            cnt = 0
 
            for child in group.children:
 
                cnt += child.repositories.count()
 
                cnt += children_count(child)
 
            return cnt
 

	
 
        return cnt + children_count(self)
 

	
 
    def recursive_groups_and_repos(self):
 
        """
 
        Recursively return all groups, with repositories in those groups
 
        """
 
        all_ = []
 

	
 
        def _get_members(root_gr):
 
            for r in root_gr.repositories:
 
                all_.append(r)
 
            childs = root_gr.children.all()
 
            if childs:
 
                for gr in childs:
 
                    all_.append(gr)
 
                    _get_members(gr)
 

	
 
        _get_members(self)
 
        return [self] + all_
 

	
 
    def get_new_name(self, group_name):
 
        """
 
        returns new full group name based on parent and new name
 

	
 
        :param group_name:
 
        """
 
        path_prefix = (self.parent_group.full_path_splitted if
 
                       self.parent_group else [])
 
        return RepoGroup.url_sep().join(path_prefix + [group_name])
 

	
 

	
 
class Permission(Base, BaseModel):
 
    __tablename__ = 'permissions'
 
    __table_args__ = (
 
        Index('p_perm_name_idx', 'permission_name'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    PERMS = [
 
        ('repository.none', _('Repository no access')),
 
        ('repository.read', _('Repository read access')),
 
        ('repository.write', _('Repository write access')),
 
        ('repository.admin', _('Repository admin access')),
 

	
 
        ('group.none', _('Repositories Group no access')),
 
        ('group.read', _('Repositories Group read access')),
 
        ('group.write', _('Repositories Group write access')),
 
        ('group.admin', _('Repositories Group admin access')),
 

	
 
        ('hg.admin', _('Kallithea Administrator')),
 
        ('hg.create.none', _('Repository creation disabled')),
 
        ('hg.create.repository', _('Repository creation enabled')),
 
        ('hg.fork.none', _('Repository forking disabled')),
 
        ('hg.fork.repository', _('Repository forking enabled')),
 
        ('hg.register.none', _('Register disabled')),
 
        ('hg.register.manual_activate', _('Register new user with Kallithea '
 
                                          'with manual activation')),
 

	
 
        ('hg.register.auto_activate', _('Register new user with Kallithea '
 
                                        'with auto activation')),
 
    ]
 

	
 
    # defines which permissions are more important; the higher the weight, the more important
 
    PERM_WEIGHTS = {
 
        'repository.none': 0,
 
        'repository.read': 1,
 
        'repository.write': 3,
 
        'repository.admin': 4,
 

	
 
        'group.none': 0,
 
        'group.read': 1,
 
        'group.write': 3,
 
        'group.admin': 4,
 

	
 
        'hg.fork.none': 0,
 
        'hg.fork.repository': 1,
 
        'hg.create.none': 0,
 
        'hg.create.repository':1
 
    }
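    # Editor's note -- illustrative only, not part of this changeset: the
    # weights give a total order on permissions, so the strongest of several
    # candidate permissions can be picked with e.g. (the usage is an
    # assumption, not taken from this file):
    #
    #   >>> max(['repository.read', 'repository.write'],
    #   ...     key=Permission.PERM_WEIGHTS.get)
    #   'repository.write'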
 

	
 
    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    permission_name = Column("permission_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    permission_longname = Column("permission_longname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (
 
            self.__class__.__name__, self.permission_id, self.permission_name
 
        )
 

	
 
    @classmethod
 
    def get_by_key(cls, key):
 
        return cls.query().filter(cls.permission_name == key).scalar()
 

	
 
    @classmethod
 
    def get_default_perms(cls, default_user_id):
 
        q = Session().query(UserRepoToPerm, Repository, cls)\
 
         .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
 
         .join((cls, UserRepoToPerm.permission_id == cls.permission_id))\
 
         .filter(UserRepoToPerm.user_id == default_user_id)
 

	
 
        return q.all()
 

	
 
    @classmethod
 
    def get_default_group_perms(cls, default_user_id):
 
        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls)\
 
         .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
 
         .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id))\
 
         .filter(UserRepoGroupToPerm.user_id == default_user_id)
 

	
 
        return q.all()
 

	
 

	
 
class UserRepoToPerm(Base, BaseModel):
 
    __tablename__ = 'repo_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User')
 
    repository = relationship('Repository')
 
    permission = relationship('Permission')
 

	
 
    @classmethod
 
    def create(cls, user, repository, permission):
 
        n = cls()
 
        n.user = user
 
        n.repository = repository
 
        n.permission = permission
 
        Session().add(n)
 
        return n
 

	
 
    def __unicode__(self):
 
        return u'<user:%s => %s >' % (self.user, self.repository)
 

	
 

	
 
class UserToPerm(Base, BaseModel):
 
    __tablename__ = 'user_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'permission_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User')
 
    permission = relationship('Permission', lazy='joined')
 

	
 

	
 
class UserGroupRepoToPerm(Base, BaseModel):
 
    __tablename__ = 'users_group_repo_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
 

	
 
    users_group = relationship('UserGroup')
 
    permission = relationship('Permission')
 
    repository = relationship('Repository')
 

	
 
    @classmethod
 
    def create(cls, users_group, repository, permission):
 
        n = cls()
 
        n.users_group = users_group
 
        n.repository = repository
 
        n.permission = permission
 
        Session().add(n)
 
        return n
 

	
 
    def __unicode__(self):
 
        return u'<userGroup:%s => %s >' % (self.users_group, self.repository)
 

	
 

	
 
class UserGroupToPerm(Base, BaseModel):
 
    __tablename__ = 'users_group_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('users_group_id', 'permission_id',),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 

	
 
    users_group = relationship('UserGroup')
 
    permission = relationship('Permission')
 

	
 

	
 
class UserRepoGroupToPerm(Base, BaseModel):
 
    __tablename__ = 'user_repo_group_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 

	
 
    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User')
 
    group = relationship('RepoGroup')
 
    permission = relationship('Permission')
 

	
 

	
 
class UserGroupRepoGroupToPerm(Base, BaseModel):
 
    __tablename__ = 'users_group_repo_group_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('users_group_id', 'group_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 

	
 
    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 

	
 
    users_group = relationship('UserGroup')
 
    permission = relationship('Permission')
 
    group = relationship('RepoGroup')
 

	
 

	
 
class Statistics(Base, BaseModel):
 
    __tablename__ = 'statistics'
 
    __table_args__ = (
 
         UniqueConstraint('repository_id'),
 
         {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
          'mysql_charset': 'utf8'}
 
    )
 
    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
 
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
 
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
 
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
 
    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
 

	
 
    repository = relationship('Repository', single_parent=True)
 

	
 

	
 
class UserFollowing(Base, BaseModel):
 
    __tablename__ = 'user_followings'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'follows_repository_id'),
 
        UniqueConstraint('user_id', 'follows_user_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 

	
 
    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
 
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
 

	
 
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
 

	
 
    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
 
    follows_repository = relationship('Repository', order_by='Repository.repo_name')
 

	
 
    @classmethod
 
    def get_repo_followers(cls, repo_id):
 
        return cls.query().filter(cls.follows_repo_id == repo_id)
 

	
 

	
 
class CacheInvalidation(Base, BaseModel):
 
    __tablename__ = 'cache_invalidation'
 
    __table_args__ = (
 
        UniqueConstraint('cache_key'),
 
        Index('key_idx', 'cache_key'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    cache_key = Column("cache_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    cache_args = Column("cache_args", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
 

	
 
    def __init__(self, cache_key, cache_args=''):
 
        self.cache_key = cache_key
 
        self.cache_args = cache_args
 
        self.cache_active = False
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (self.__class__.__name__,
 
                                  self.cache_id, self.cache_key)
 

	
 
    @property
 
    def prefix(self):
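        # illustrative example (assumed values): with cache_key 'prod1myrepo'
        # and cache_args 'myrepo', the split below yields ['prod1', ''] and the
        # instance prefix 'prod1' is returned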
 
        _split = self.cache_key.split(self.cache_args, 1)
 
        if _split and len(_split) == 2:
 
            return _split[0]
 
        return ''
 

	
 
    @classmethod
 
    def clear_cache(cls):
 
        cls.query().delete()
 

	
 
    @classmethod
 
    def _get_key(cls, key):
 
        """
 
        Wrapper for generating a key, together with a prefix
 

	
 
        :param key:
 
        """
 
        import kallithea
 
        prefix = ''
 
        org_key = key
 
        iid = kallithea.CONFIG.get('instance_id')
 
        if iid:
 
            prefix = iid
 

	
 
        return "%s%s" % (prefix, key), prefix, org_key
 

	
 
    @classmethod
 
    def get_by_key(cls, key):
 
        return cls.query().filter(cls.cache_key == key).scalar()
 

	
 
    @classmethod
 
    def get_by_repo_name(cls, repo_name):
 
        return cls.query().filter(cls.cache_args == repo_name).all()
 

	
 
    @classmethod
 
    def _get_or_create_key(cls, key, repo_name, commit=True):
 
        inv_obj = Session().query(cls).filter(cls.cache_key == key).scalar()
 
        if not inv_obj:
 
            try:
 
                inv_obj = CacheInvalidation(key, repo_name)
 
                Session().add(inv_obj)
 
                if commit:
 
                    Session().commit()
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                Session().rollback()
 
        return inv_obj
 

	
 
    @classmethod
 
    def invalidate(cls, key):
 
        """
 
        Returns the CacheInvalidation object if the given key should be invalidated,
 
        None otherwise. `cache_active = False` means that this cache
 
        state is not valid and needs to be invalidated
 

	
 
        :param key:
 
        """
 
        repo_name = key
 
        repo_name = remove_suffix(repo_name, '_README')
 
        repo_name = remove_suffix(repo_name, '_RSS')
 
        repo_name = remove_suffix(repo_name, '_ATOM')
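        # e.g. the keys 'myrepo', 'myrepo_README', 'myrepo_RSS' and 'myrepo_ATOM'
        # all map back to the repo_name 'myrepo' (hypothetical repository name)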
 

	
 
        # adds instance prefix
 
        key, _prefix, _org_key = cls._get_key(key)
 
        inv = cls._get_or_create_key(key, repo_name)
 

	
 
        if inv and inv.cache_active is False:
 
            return inv
 

	
 
    @classmethod
 
    def set_invalidate(cls, key=None, repo_name=None):
 
        """
 
        Mark this cache key for invalidation, either by a single key or by whole
 
        cache sets based on repo_name
 

	
 
        :param key:
 
        """
 
        if key:
 
            key, _prefix, _org_key = cls._get_key(key)
 
            inv_objs = Session().query(cls).filter(cls.cache_key == key).all()
 
        elif repo_name:
 
            inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
 

	
 
        log.debug('marking %s key[s] for invalidation based on key=%s,repo_name=%s'
 
                  % (len(inv_objs), key, repo_name))
 
        try:
 
            for inv_obj in inv_objs:
 
                inv_obj.cache_active = False
 
                Session().add(inv_obj)
 
            Session().commit()
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            Session().rollback()
 

	
 
    @classmethod
 
    def set_valid(cls, key):
 
        """
 
        Mark this cache key as active and currently cached
 

	
 
        :param key:
 
        """
 
        inv_obj = cls.get_by_key(key)
 
        inv_obj.cache_active = True
 
        Session().add(inv_obj)
 
        Session().commit()
 

	
 
    @classmethod
 
    def get_cache_map(cls):
 

	
 
        class cachemapdict(dict):
 

	
 
            def __init__(self, *args, **kwargs):
 
                fixkey = kwargs.get('fixkey')
 
                if fixkey:
 
                    del kwargs['fixkey']
 
                self.fixkey = fixkey
 
                super(cachemapdict, self).__init__(*args, **kwargs)
 

	
 
            def __getattr__(self, name):
 
                key = name
 
                if self.fixkey:
 
                    key, _prefix, _org_key = cls._get_key(key)
 
                if key in self.__dict__:
 
                    return self.__dict__[key]
 
                else:
 
                    return self[key]
 

	
 
            def __getitem__(self, key):
 
                if self.fixkey:
 
                    key, _prefix, _org_key = cls._get_key(key)
 
                try:
 
                    return super(cachemapdict, self).__getitem__(key)
 
                except KeyError:
 
                    return
 

	
 
        cache_map = cachemapdict(fixkey=True)
 
        for obj in cls.query().all():
 
            cache_map[obj.cache_key] = cachemapdict(obj.get_dict())
 
        return cache_map
 

	
 

	
 
class ChangesetComment(Base, BaseModel):
 
    __tablename__ = 'changeset_comments'
kallithea/lib/dbmigrate/schema/db_1_6_0.py
Show inline comments
 
@@ -241,1540 +241,1540 @@ class Setting(Base, BaseModel):
 
        for row in ret:
 
            key = row.app_settings_name
 
            if strip_prefix:
 
                key = remove_prefix(key, prefix='default_')
 
            fd.update({key: row.app_settings_value})
 

	
 
        return fd
 

	
 

	
 
class Ui(Base, BaseModel):
 
    __tablename__ = DB_PREFIX + 'ui'
 
    __table_args__ = (
 
        UniqueConstraint('ui_key'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 

	
 
    HOOK_UPDATE = 'changegroup.update'
 
    HOOK_REPO_SIZE = 'changegroup.repo_size'
 
    HOOK_PUSH = 'changegroup.push_logger'
 
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
 
    HOOK_PULL = 'outgoing.pull_logger'
 
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
 

	
 
    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    ui_section = Column("ui_section", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    ui_key = Column("ui_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    ui_value = Column("ui_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
 

	
 
    @classmethod
 
    def get_by_key(cls, key):
 
        return cls.query().filter(cls.ui_key == key).scalar()
 

	
 
    @classmethod
 
    def get_builtin_hooks(cls):
 
        q = cls.query()
 
        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
 
                                     cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
 
                                     cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
 
        return q.all()
 

	
 
    @classmethod
 
    def get_custom_hooks(cls):
 
        q = cls.query()
 
        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
 
                                      cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
 
                                      cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
 
        q = q.filter(cls.ui_section == 'hooks')
 
        return q.all()
 

	
 
    @classmethod
 
    def get_repos_location(cls):
 
        return cls.get_by_key('/').ui_value
 

	
 
    @classmethod
 
    def create_or_update_hook(cls, key, val):
 
        new_ui = cls.get_by_key(key) or cls()
 
        new_ui.ui_section = 'hooks'
 
        new_ui.ui_active = True
 
        new_ui.ui_key = key
 
        new_ui.ui_value = val
 

	
 
        Session().add(new_ui)
 

	
 
    def __repr__(self):
 
        return '<DB:%s[%s:%s]>' % (self.__class__.__name__, self.ui_key,
 
                                   self.ui_value)
 

	
 

	
 
class User(Base, BaseModel):
 
    __tablename__ = 'users'
 
    __table_args__ = (
 
        UniqueConstraint('username'), UniqueConstraint('email'),
 
        Index('u_username_idx', 'username'),
 
        Index('u_email_idx', 'email'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    DEFAULT_USER = 'default'
 
    DEFAULT_PERMISSIONS = [
 
        'hg.register.manual_activate', 'hg.create.repository',
 
        'hg.fork.repository', 'repository.read', 'group.read'
 
    ]
 
    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    password = Column("password", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
 
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
 
    name = Column("firstname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    lastname = Column("lastname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
 
    ldap_dn = Column("ldap_dn", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    api_key = Column("api_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
 

	
 
    user_log = relationship('UserLog')
 
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
 

	
 
    repositories = relationship('Repository')
 
    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
 
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
 

	
 
    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
 
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
 

	
 
    group_member = relationship('UserGroupMember', cascade='all')
 

	
 
    notifications = relationship('UserNotification', cascade='all')
 
    # notifications assigned to this user
 
    user_created_notifications = relationship('Notification', cascade='all')
 
    # comments created by this user
 
    user_comments = relationship('ChangesetComment', cascade='all')
 
    #extra emails for this user
 
    user_emails = relationship('UserEmailMap', cascade='all')
 

	
 
    @hybrid_property
 
    def email(self):
 
        return self._email
 

	
 
    @email.setter
 
    def email(self, val):
 
        self._email = val.lower() if val else None
 

	
 
    @property
 
    def firstname(self):
 
        # alias for future
 
        return self.name
 

	
 
    @property
 
    def emails(self):
 
        other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
 
        return [self.email] + [x.email for x in other]
 

	
 
    @property
 
    def ip_addresses(self):
 
        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
 
        return [x.ip_addr for x in ret]
 

	
 
    @property
 
    def username_and_name(self):
 
        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
 

	
 
    @property
 
    def full_name(self):
 
        return '%s %s' % (self.firstname, self.lastname)
 

	
 
    @property
 
    def full_name_or_username(self):
 
        return ('%s %s' % (self.firstname, self.lastname)
 
                if (self.firstname and self.lastname) else self.username)
 

	
 
    @property
 
    def full_contact(self):
 
        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
 

	
 
    @property
 
    def short_contact(self):
 
        return '%s %s' % (self.firstname, self.lastname)
 

	
 
    @property
 
    def is_admin(self):
 
        return self.admin
 

	
 
    @property
 
    def AuthUser(self):
 
        """
 
        Returns instance of AuthUser for this user
 
        """
 
        from kallithea.lib.auth import AuthUser
 
        return AuthUser(user_id=self.user_id, api_key=self.api_key,
 
                        username=self.username)
 

	
 
    def __unicode__(self):
 
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
 
                                     self.user_id, self.username)
 

	
 
    @classmethod
 
    def get_by_username(cls, username, case_insensitive=False, cache=False):
 
        if case_insensitive:
 
            q = cls.query().filter(cls.username.ilike(username))
 
        else:
 
            q = cls.query().filter(cls.username == username)
 

	
 
        if cache:
 
            q = q.options(FromCache(
 
                            "sql_cache_short",
 
                            "get_user_%s" % _hash_key(username)
 
                          )
 
            )
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_api_key(cls, api_key, cache=False):
 
        q = cls.query().filter(cls.api_key == api_key)
 

	
 
        if cache:
 
            q = q.options(FromCache("sql_cache_short",
 
                                    "get_api_key_%s" % api_key))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_email(cls, email, case_insensitive=False, cache=False):
 
        if case_insensitive:
 
            q = cls.query().filter(cls.email.ilike(email))
 
        else:
 
            q = cls.query().filter(cls.email == email)
 

	
 
        if cache:
 
            q = q.options(FromCache("sql_cache_short",
 
                                    "get_email_key_%s" % email))
 

	
 
        ret = q.scalar()
 
        if ret is None:
 
            q = UserEmailMap.query()
 
            # try fetching in alternate email map
 
            if case_insensitive:
 
                q = q.filter(UserEmailMap.email.ilike(email))
 
            else:
 
                q = q.filter(UserEmailMap.email == email)
 
            q = q.options(joinedload(UserEmailMap.user))
 
            if cache:
 
                q = q.options(FromCache("sql_cache_short",
 
                                        "get_email_map_key_%s" % email))
 
            ret = getattr(q.scalar(), 'user', None)
 

	
 
        return ret
 

	
 
    @classmethod
 
    def get_from_cs_author(cls, author):
 
        """
 
        Tries to get a User object out of a commit author string
 

	
 
        :param author:
 
        """
 
        from kallithea.lib.helpers import email, author_name
 
        # valid email in the passed attribute; see if it belongs to a user in the system
 
        _email = email(author)
 
        if _email:
 
            user = cls.get_by_email(_email, case_insensitive=True)
 
            if user:
 
                return user
 
        # Maybe we can match by username?
 
        _author = author_name(author)
 
        user = cls.get_by_username(_author, case_insensitive=True)
 
        if user:
 
            return user
 

	
 
    def update_lastlogin(self):
 
        """Update user lastlogin"""
 
        self.last_login = datetime.datetime.now()
 
        Session().add(self)
 
        log.debug('updated user %s lastlogin' % self.username)
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating user related data for API
 
        """
 
        user = self
 
        data = dict(
 
            user_id=user.user_id,
 
            username=user.username,
 
            firstname=user.name,
 
            lastname=user.lastname,
 
            email=user.email,
 
            emails=user.emails,
 
            api_key=user.api_key,
 
            active=user.active,
 
            admin=user.admin,
 
            ldap_dn=user.ldap_dn,
 
            last_login=user.last_login,
 
            ip_addresses=user.ip_addresses
 
        )
 
        return data
 

	
 
    def __json__(self):
 
        data = dict(
 
            full_name=self.full_name,
 
            full_name_or_username=self.full_name_or_username,
 
            short_contact=self.short_contact,
 
            full_contact=self.full_contact
 
        )
 
        data.update(self.get_api_data())
 
        return data
 

	
 

	
 
class UserEmailMap(Base, BaseModel):
 
    __tablename__ = 'user_email_map'
 
    __table_args__ = (
 
        Index('uem_email_idx', 'email'),
 
        UniqueConstraint('email'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    __mapper_args__ = {}
 

	
 
    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
 
    user = relationship('User', lazy='joined')
 

	
 
    @validates('_email')
 
    def validate_email(self, key, email):
 
        # check that this email is not the main one
 
        main_email = Session().query(User).filter(User.email == email).scalar()
 
        if main_email is not None:
 
            raise AttributeError('email %s is present in user table' % email)
 
        return email
 

	
 
    @hybrid_property
 
    def email(self):
 
        return self._email
 

	
 
    @email.setter
 
    def email(self, val):
 
        self._email = val.lower() if val else None
 

	
 

	
 
class UserIpMap(Base, BaseModel):
 
    __tablename__ = 'user_ip_map'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'ip_addr'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    __mapper_args__ = {}
 

	
 
    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    ip_addr = Column("ip_addr", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
 
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
 
    user = relationship('User', lazy='joined')
 

	
 
    @classmethod
 
    def _get_ip_range(cls, ip_addr):
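        # illustrative example (hypothetical input): '192.168.1.0/24' yields
        # ['192.168.1.0', '192.168.1.255'], i.e. network and broadcast address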
 
        from kallithea.lib import ipaddr
 
        net = ipaddr.IPNetwork(address=ip_addr)
 
        return [str(net.network), str(net.broadcast)]
 

	
 
    def __json__(self):
 
        return dict(
 
          ip_addr=self.ip_addr,
 
          ip_range=self._get_ip_range(self.ip_addr)
 
        )
 

	
 

	
 
class UserLog(Base, BaseModel):
 
    __tablename__ = 'user_logs'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
 
    repository_name = Column("repository_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    user_ip = Column("user_ip", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    action = Column("action", UnicodeText(1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
 

	
 
    @property
 
    def action_as_day(self):
 
        return datetime.date(*self.action_date.timetuple()[:3])
 

	
 
    user = relationship('User')
 
    repository = relationship('Repository', cascade='')
 

	
 

	
 
class UserGroup(Base, BaseModel):
 
    __tablename__ = 'users_groups'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 

	
 
    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_name = Column("users_group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
 
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
 
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
 

	
 
    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
 
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
 
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
 

	
 
    def __unicode__(self):
 
        return u'<userGroup(%s)>' % (self.users_group_name)
 

	
 
    @classmethod
 
    def get_by_group_name(cls, group_name, cache=False,
 
                          case_insensitive=False):
 
        if case_insensitive:
 
            q = cls.query().filter(cls.users_group_name.ilike(group_name))
 
        else:
 
            q = cls.query().filter(cls.users_group_name == group_name)
 
        if cache:
 
            q = q.options(FromCache(
 
                            "sql_cache_short",
 
                            "get_user_%s" % _hash_key(group_name)
 
                          )
 
            )
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get(cls, users_group_id, cache=False):
 
        users_group = cls.query()
 
        if cache:
 
            users_group = users_group.options(FromCache("sql_cache_short",
 
                                    "get_users_group_%s" % users_group_id))
 
        return users_group.get(users_group_id)
 

	
 
    def get_api_data(self):
 
        users_group = self
 

	
 
        data = dict(
 
            users_group_id=users_group.users_group_id,
 
            group_name=users_group.users_group_name,
 
            active=users_group.users_group_active,
 
        )
 

	
 
        return data
 

	
 

	
 
class UserGroupMember(Base, BaseModel):
 
    __tablename__ = 'users_groups_members'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 

	
 
    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User', lazy='joined')
 
    users_group = relationship('UserGroup')
 

	
 
    def __init__(self, gr_id='', u_id=''):
 
        self.users_group_id = gr_id
 
        self.user_id = u_id
 

	
 

	
 
class RepositoryField(Base, BaseModel):
 
    __tablename__ = 'repositories_fields'
 
    __table_args__ = (
 
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields
 

	
 
    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
 
    field_key = Column("field_key", String(250, convert_unicode=False, assert_unicode=None))
 
    field_label = Column("field_label", String(1024, convert_unicode=False, assert_unicode=None), nullable=False)
 
    field_value = Column("field_value", String(10000, convert_unicode=False, assert_unicode=None), nullable=False)
 
    field_desc = Column("field_desc", String(1024, convert_unicode=False, assert_unicode=None), nullable=False)
 
    field_type = Column("field_type", String(256), nullable=False, unique=None)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    repository = relationship('Repository')
 

	
 
    @property
 
    def field_key_prefixed(self):
 
        return 'ex_%s' % self.field_key
 

	
 
    @classmethod
 
    def un_prefix_key(cls, key):
 
        if key.startswith(cls.PREFIX):
 
            return key[len(cls.PREFIX):]
 
        return key
 

	
 
    @classmethod
 
    def get_by_key_name(cls, key, repo):
 
        row = cls.query()\
 
                .filter(cls.repository == repo)\
 
                .filter(cls.field_key == key).scalar()
 
        return row
 

	
 

	
 
class Repository(Base, BaseModel):
 
    __tablename__ = 'repositories'
 
    __table_args__ = (
 
        UniqueConstraint('repo_name'),
 
        Index('r_repo_name_idx', 'repo_name'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 

	
 
    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    repo_name = Column("repo_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
 
    clone_uri = Column("clone_uri", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
 
    repo_type = Column("repo_type", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
 
    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
 
    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
 
    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
 
    description = Column("description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
 
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
 
    landing_rev = Column("landing_revision", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
 
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
 
    _locked = Column("locked", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
 
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) #JSON data
 

	
 
    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
 
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
 

	
 
    user = relationship('User')
 
    fork = relationship('Repository', remote_side=repo_id)
 
    group = relationship('RepoGroup')
 
    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
 
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
 
    stats = relationship('Statistics', cascade='all', uselist=False)
 

	
 
    followers = relationship('UserFollowing',
 
                             primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
 
                             cascade='all')
 
    extra_fields = relationship('RepositoryField',
 
                                cascade="all, delete, delete-orphan")
 

	
 
    logs = relationship('UserLog')
 
    comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan")
 

	
 
    pull_requests_org = relationship('PullRequest',
 
                    primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
 
                    cascade="all, delete, delete-orphan")
 

	
 
    pull_requests_other = relationship('PullRequest',
 
                    primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
 
                    cascade="all, delete, delete-orphan")
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
 
                                   safe_unicode(self.repo_name))
 

	
 
    @hybrid_property
 
    def locked(self):
 
        # always should return [user_id, timelocked]
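        # the stored format is '<user_id>:<unix timestamp>' as joined by the
        # setter below, e.g. '4:1407845303.0' (illustrative value only)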
 
        if self._locked:
 
            _lock_info = self._locked.split(':')
 
            return int(_lock_info[0]), _lock_info[1]
 
        return [None, None]
 

	
 
    @locked.setter
 
    def locked(self, val):
 
        if val and isinstance(val, (list, tuple)):
 
            self._locked = ':'.join(map(str, val))
 
        else:
 
            self._locked = None
 

	
 
    @hybrid_property
 
    def changeset_cache(self):
 
        from kallithea.lib.vcs.backends.base import EmptyChangeset
 
        dummy = EmptyChangeset().__json__()
 
        if not self._changeset_cache:
 
            return dummy
 
        try:
 
            return json.loads(self._changeset_cache)
 
        except TypeError:
 
            return dummy
 

	
 
    @changeset_cache.setter
 
    def changeset_cache(self, val):
 
        try:
 
            self._changeset_cache = json.dumps(val)
 
        except Exception:
 
            log.error(traceback.format_exc())
 

	
 
    @classmethod
 
    def url_sep(cls):
 
        return URL_SEP
 

	
 
    @classmethod
 
    def normalize_repo_name(cls, repo_name):
 
        """
 
        Normalizes an OS-specific repo_name to the format stored internally in
 
        the database, using URL_SEP
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        return cls.url_sep().join(repo_name.split(os.sep))
 

	
 
    @classmethod
 
    def get_by_repo_name(cls, repo_name):
 
        q = Session().query(cls).filter(cls.repo_name == repo_name)
 
        q = q.options(joinedload(Repository.fork))\
 
                .options(joinedload(Repository.user))\
 
                .options(joinedload(Repository.group))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_full_path(cls, repo_full_path):
 
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
 
        repo_name = cls.normalize_repo_name(repo_name)
 
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
 

	
 
    @classmethod
 
    def get_repo_forks(cls, repo_id):
 
        return cls.query().filter(Repository.fork_id == repo_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns the base path where all repositories are stored
 

	
 
        :param cls:
 
        """
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == cls.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def forks(self):
 
        """
 
        Return forks of this repo
 
        """
 
        return Repository.get_repo_forks(self.repo_id)
 

	
 
    @property
 
    def parent(self):
 
        """
 
        Returns fork parent
 
        """
 
        return self.fork
 

	
 
    @property
 
    def just_name(self):
 
        return self.repo_name.split(Repository.url_sep())[-1]
 

	
 
    @property
 
    def groups_with_parents(self):
 
        groups = []
 
        if self.group is None:
 
            return groups
 

	
 
        cur_gr = self.group
 
        groups.insert(0, cur_gr)
 
        while 1:
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            groups.insert(0, gr)
 

	
 
        return groups
 

	
 
    @property
 
    def groups_and_repo(self):
 
        return self.groups_with_parents, self.just_name, self.repo_name
 

	
 
    @LazyProperty
 
    def repo_path(self):
 
        """
 
        Returns the full base path of this repository, i.e. where it actually
 
        exists on the filesystem
 
        """
 
        q = Session().query(Ui).filter(Ui.ui_key ==
 
                                              Repository.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def repo_full_path(self):
 
        p = [self.repo_path]
 
        # we need to split the name by / since this is how we store the
 
        # names in the database, but that eventually needs to be converted
 
        # into a valid system path
 
        p += self.repo_name.split(Repository.url_sep())
 
        return os.path.join(*map(safe_unicode, p))
 

	
 
    @property
 
    def cache_keys(self):
 
        """
 
        Returns associated cache keys for that repo
 
        """
 
        return CacheInvalidation.query()\
 
            .filter(CacheInvalidation.cache_args == self.repo_name)\
 
            .order_by(CacheInvalidation.cache_key)\
 
            .all()
 

	
 
    def get_new_name(self, repo_name):
 
        """
 
        returns new full repository name based on assigned group and new name
 

	
 
        :param repo_name:
 
        """
 
        path_prefix = self.group.full_path_splitted if self.group else []
 
        return Repository.url_sep().join(path_prefix + [repo_name])
 

	
 
    @property
 
    def _ui(self):
 
        """
 
        Creates a db-based ui object for this repository
 
        """
 
        from kallithea.lib.utils import make_ui
 
        return make_ui('db', clear_session=False)
 

	
 
    @classmethod
 
    def is_valid(cls, repo_name):
 
        """
 
        returns True if given repo name is a valid filesystem repository
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        from kallithea.lib.utils import is_valid_repo
 

	
 
        return is_valid_repo(repo_name, cls.base_path())
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating repo api data
 

	
 
        """
 
        repo = self
 
        data = dict(
 
            repo_id=repo.repo_id,
 
            repo_name=repo.repo_name,
 
            repo_type=repo.repo_type,
 
            clone_uri=repo.clone_uri,
 
            private=repo.private,
 
            created_on=repo.created_on,
 
            description=repo.description,
 
            landing_rev=repo.landing_rev,
 
            owner=repo.user.username,
 
            fork_of=repo.fork.repo_name if repo.fork else None,
 
            enable_statistics=repo.enable_statistics,
 
            enable_locking=repo.enable_locking,
 
            enable_downloads=repo.enable_downloads,
 
            last_changeset=repo.changeset_cache,
 
            locked_by=User.get(self.locked[0]).get_api_data() \
 
                if self.locked[0] else None,
 
            locked_date=time_to_datetime(self.locked[1]) \
 
                if self.locked[1] else None
 
        )
 
        rc_config = Setting.get_app_settings()
 
        repository_fields = str2bool(rc_config.get('repository_fields'))
 
        if repository_fields:
 
            for f in self.extra_fields:
 
                data[f.field_key_prefixed] = f.field_value
 

	
 
        return data
 

	
 
    @classmethod
 
    def lock(cls, repo, user_id):
 
        repo.locked = [user_id, time.time()]
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def unlock(cls, repo):
 
        repo.locked = None
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def getlock(cls, repo):
 
        return repo.locked
 

	
 
    @property
 
    def last_db_change(self):
 
        return self.updated_on
 

	
 
    def clone_url(self, **override):
 
        from pylons import url
 
        import kallithea.lib.helpers as h
 
        from urlparse import urlparse
 
        import urllib
 
        parsed_url = urlparse(url('home', qualified=True))
 
        parsed_url = urlparse(h.canonical_url('home'))
 
        default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
 
        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
 
        args = {
 
           'user': '',
 
           'pass': '',
 
           'scheme': parsed_url.scheme,
 
           'netloc': parsed_url.netloc,
 
           'prefix': decoded_path,
 
           'path': self.repo_name
 
        }
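        # illustrative sketch (assumed values): with h.canonical_url('home')
        # resolving to 'https://example.com/' and repo_name 'myrepo', the
        # default template expands to 'https://example.com/myrepo'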
 

	
 
        args.update(override)
 
        return default_clone_uri % args
 

	
 
    #==========================================================================
 
    # SCM PROPERTIES
 
    #==========================================================================
 

	
 
    def get_changeset(self, rev=None):
 
        return get_changeset_safe(self.scm_instance, rev)
 

	
 
    def get_landing_changeset(self):
 
        """
 
        Returns the landing changeset, or the tip if that doesn't exist
 
        """
 
        cs = self.get_changeset(self.landing_rev) or self.get_changeset()
 
        return cs
 

	
 
    def update_changeset_cache(self, cs_cache=None):
 
        """
 
        Update cache of last changeset for repository, keys should be::
 

	
 
            short_id
 
            raw_id
 
            revision
 
            message
 
            date
 
            author
 

	
 
        :param cs_cache:
 
        """
 
        from kallithea.lib.vcs.backends.base import BaseChangeset
 
        if cs_cache is None:
 
            cs_cache = EmptyChangeset()
 
            # use no-cache version here
 
            scm_repo = self.scm_instance_no_cache()
 
            if scm_repo:
 
                cs_cache = scm_repo.get_changeset()
 

	
 
        if isinstance(cs_cache, BaseChangeset):
 
            cs_cache = cs_cache.__json__()
 

	
 
        if (cs_cache != self.changeset_cache or not self.changeset_cache):
 
            _default = datetime.datetime.fromtimestamp(0)
 
            last_change = cs_cache.get('date') or _default
 
            log.debug('updated repo %s with new cs cache %s'
 
                      % (self.repo_name, cs_cache))
 
            self.updated_on = last_change
 
            self.changeset_cache = cs_cache
 
            Session().add(self)
 
            Session().commit()
 
        else:
 
            log.debug('Skipping repo:%s already with latest changes'
 
                      % self.repo_name)
 

	
 
    @property
 
    def tip(self):
 
        return self.get_changeset('tip')
 

	
 
    @property
 
    def author(self):
 
        return self.tip.author
 

	
 
    @property
 
    def last_change(self):
 
        return self.scm_instance.last_change
 

	
 
    def get_comments(self, revisions=None):
 
        """
 
        Returns comments for this repository grouped by revisions
 

	
 
        :param revisions: filter query by revisions only
 
        """
 
        cmts = ChangesetComment.query()\
 
            .filter(ChangesetComment.repo == self)
 
        if revisions:
 
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
 
        grouped = defaultdict(list)
 
        for cmt in cmts.all():
 
            grouped[cmt.revision].append(cmt)
 
        return grouped
 

	
 
    def statuses(self, revisions=None):
 
        """
 
        Returns statuses for this repository
 

	
 
        :param revisions: list of revisions to get statuses for
 
        :type revisions: list
 
        """
 

	
 
        statuses = ChangesetStatus.query()\
 
            .filter(ChangesetStatus.repo == self)\
 
            .filter(ChangesetStatus.version == 0)
 
        if revisions:
 
            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
 
        grouped = {}
 

	
 
        # maybe we have an open new pull request without a status?
 
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
 
        status_lbl = ChangesetStatus.get_status_lbl(stat)
 
        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
 
            for rev in pr.revisions:
 
                pr_id = pr.pull_request_id
 
                pr_repo = pr.other_repo.repo_name
 
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
 

	
 
        for stat in statuses.all():
 
            pr_id = pr_repo = None
 
            if stat.pull_request:
 
                pr_id = stat.pull_request.pull_request_id
 
                pr_repo = stat.pull_request.other_repo.repo_name
 
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
 
                                      pr_id, pr_repo]
 
        return grouped
 

	
 
    def _repo_size(self):
 
        from kallithea.lib import helpers as h
 
        log.debug('calculating repository size...')
 
        return h.format_byte_size(self.scm_instance.size)
 

	
 
    #==========================================================================
 
    # SCM CACHE INSTANCE
 
    #==========================================================================
 

	
 
    @property
 
    def invalidate(self):
 
        return CacheInvalidation.invalidate(self.repo_name)
 

	
 
    def set_invalidate(self):
 
        """
 
        mark the cache of this instance for invalidation
 
        """
 
        CacheInvalidation.set_invalidate(repo_name=self.repo_name)
 

	
 
    def scm_instance_no_cache(self):
 
        return self.__get_instance()
 

	
 
    @LazyProperty
 
    def scm_instance(self):
 
        import kallithea
 
        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
 
        if full_cache:
 
            return self.scm_instance_cached()
 
        return self.__get_instance()
 

	
 
    def scm_instance_cached(self, cache_map=None):
 
        @cache_region('long_term')
 
        def _c(repo_name):
 
            return self.__get_instance()
 
        rn = self.repo_name
 
        log.debug('Getting cached instance of repo')
 

	
 
        if cache_map:
 
            # get using prefilled cache_map
 
            invalidate_repo = cache_map[self.repo_name]
 
            if invalidate_repo:
 
                invalidate_repo = (None if invalidate_repo.cache_active
 
                                   else invalidate_repo)
 
        else:
 
            # get from invalidate
 
            invalidate_repo = self.invalidate
 

	
 
        if invalidate_repo is not None:
 
            region_invalidate(_c, None, rn)
 
            # update our cache
 
            CacheInvalidation.set_valid(invalidate_repo.cache_key)
 
        return _c(rn)
 

	
 
    def __get_instance(self):
 
        repo_full_path = self.repo_full_path
 
        try:
 
            alias = get_scm(repo_full_path)[0]
 
            log.debug('Creating instance of %s repository from %s'
 
                      % (alias, repo_full_path))
 
            backend = get_backend(alias)
 
        except VCSError:
 
            log.error(traceback.format_exc())
 
            log.error('Perhaps this repository is in the db and not on the '
 
                      'filesystem. Run "rescan repositories" with the '
 
                      '"destroy old data" option from the admin panel')
 
            return
 

	
 
        if alias == 'hg':
 

	
 
            repo = backend(safe_str(repo_full_path), create=False,
 
                           baseui=self._ui)
 
            # skip hidden web repository
 
            if repo._get_hidden():
 
                return
 
        else:
 
            repo = backend(repo_full_path, create=False)
 

	
 
        return repo
 

	
 

	
 
class RepoGroup(Base, BaseModel):
 
    __tablename__ = 'groups'
 
    __table_args__ = (
 
        UniqueConstraint('group_name', 'group_parent_id'),
 
        CheckConstraint('group_id != group_parent_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    __mapper_args__ = {'order_by': 'group_name'}
 

	
 
    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    group_name = Column("group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
 
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
 
    group_description = Column("group_description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
 

	
 
    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
 
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
 

	
 
    parent_group = relationship('RepoGroup', remote_side=group_id)
 

	
 
    def __init__(self, group_name='', parent_group=None):
 
        self.group_name = group_name
 
        self.parent_group = parent_group
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
 
                                  self.group_name)
 

	
 
    @classmethod
 
    def groups_choices(cls, groups=None, show_empty_group=True):
 
        from webhelpers.html import literal as _literal
 
        if not groups:
 
            groups = cls.query().all()
 

	
 
        repo_groups = []
 
        if show_empty_group:
 
            repo_groups = [('-1', '-- %s --' % _('top level'))]
 
        sep = ' &raquo; '
 
        _name = lambda k: _literal(sep.join(k))
 

	
 
        repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
 
                              for x in groups])
 

	
 
        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
 
        return repo_groups
 

	
 
    @classmethod
 
    def url_sep(cls):
 
        return URL_SEP
 

	
 
    @classmethod
 
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
 
        if case_insensitive:
 
            gr = cls.query()\
 
                .filter(cls.group_name.ilike(group_name))
 
        else:
 
            gr = cls.query()\
 
                .filter(cls.group_name == group_name)
 
        if cache:
 
            gr = gr.options(FromCache(
 
                            "sql_cache_short",
 
                            "get_group_%s" % _hash_key(group_name)
 
                            )
 
            )
 
        return gr.scalar()
 

	
 
    @property
 
    def parents(self):
 
        parents_recursion_limit = 5
 
        groups = []
 
        if self.parent_group is None:
 
            return groups
 
        cur_gr = self.parent_group
 
        groups.insert(0, cur_gr)
 
        cnt = 0
 
        while 1:
 
            cnt += 1
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            if cnt == parents_recursion_limit:
 
                # this will prevent accidental infinite loops
 
                log.error('group nested more than %s levels' %
 
                          parents_recursion_limit)
 
                break
 

	
 
            groups.insert(0, gr)
 
        return groups
 

	
 
    @property
 
    def children(self):
 
        return RepoGroup.query().filter(RepoGroup.parent_group == self)
 

	
 
    @property
 
    def name(self):
 
        return self.group_name.split(RepoGroup.url_sep())[-1]
 

	
 
    @property
 
    def full_path(self):
 
        return self.group_name
 

	
 
    @property
 
    def full_path_splitted(self):
 
        return self.group_name.split(RepoGroup.url_sep())
 

	
 
    @property
 
    def repositories(self):
 
        return Repository.query()\
 
                .filter(Repository.group == self)\
 
                .order_by(Repository.repo_name)
 

	
 
    @property
 
    def repositories_recursive_count(self):
 
        cnt = self.repositories.count()
 

	
 
        def children_count(group):
 
            cnt = 0
 
            for child in group.children:
 
                cnt += child.repositories.count()
 
                cnt += children_count(child)
 
            return cnt
 

	
 
        return cnt + children_count(self)
 

	
 
    def _recursive_objects(self, include_repos=True):
 
        all_ = []
 

	
 
        def _get_members(root_gr):
 
            if include_repos:
 
                for r in root_gr.repositories:
 
                    all_.append(r)
 
            childs = root_gr.children.all()
 
            if childs:
 
                for gr in childs:
 
                    all_.append(gr)
 
                    _get_members(gr)
 

	
 
        _get_members(self)
 
        return [self] + all_
 

	
 
    def recursive_groups_and_repos(self):
 
        """
 
        Recursively return all groups, with the repositories in those groups
 
        """
 
        return self._recursive_objects()
 

	
 
    def recursive_groups(self):
 
        """
 
        Returns all child groups of this group, including children of children
 
        """
 
        return self._recursive_objects(include_repos=False)
 

	
 
    def get_new_name(self, group_name):
 
        """
 
        returns new full group name based on parent and new name
 

	
 
        :param group_name:
 
        """
 
        path_prefix = (self.parent_group.full_path_splitted if
 
                       self.parent_group else [])
 
        return RepoGroup.url_sep().join(path_prefix + [group_name])
 

	
 

	
 
class Permission(Base, BaseModel):
 
    __tablename__ = 'permissions'
 
    __table_args__ = (
 
        Index('p_perm_name_idx', 'permission_name'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    PERMS = [
 
        ('repository.none', _('Repository no access')),
 
        ('repository.read', _('Repository read access')),
 
        ('repository.write', _('Repository write access')),
 
        ('repository.admin', _('Repository admin access')),
 

	
 
        ('group.none', _('Repository group no access')),
 
        ('group.read', _('Repository group read access')),
 
        ('group.write', _('Repository group write access')),
 
        ('group.admin', _('Repository group admin access')),
 

	
 
        ('hg.admin', _('Kallithea Administrator')),
 
        ('hg.create.none', _('Repository creation disabled')),
 
        ('hg.create.repository', _('Repository creation enabled')),
 
        ('hg.fork.none', _('Repository forking disabled')),
 
        ('hg.fork.repository', _('Repository forking enabled')),
 
        ('hg.register.none', _('Register disabled')),
 
        ('hg.register.manual_activate', _('Register new user with Kallithea '
 
                                          'with manual activation')),
 

	
 
        ('hg.register.auto_activate', _('Register new user with Kallithea '
 
                                        'with auto activation')),
 
    ]
 

	
 
    # defines which permissions are more important; the higher the weight, the more important the permission
 
    PERM_WEIGHTS = {
 
        'repository.none': 0,
 
        'repository.read': 1,
 
        'repository.write': 3,
 
        'repository.admin': 4,
 

	
 
        'group.none': 0,
 
        'group.read': 1,
 
        'group.write': 3,
 
        'group.admin': 4,
 

	
 
        'hg.fork.none': 0,
 
        'hg.fork.repository': 1,
 
        'hg.create.none': 0,
 
        'hg.create.repository':1
 
    }
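    # Illustration (hypothetical, not part of the original changeset):
    # PERM_WEIGHTS can be used to pick the strongest of several permissions, e.g.
    #
    #     max(['repository.read', 'repository.write'], key=PERM_WEIGHTS.get)
    #     # -> 'repository.write'
    #
    # Note that 'hg.admin' and the 'hg.register.*' keys have no weight entry,
    # so they cannot be compared this way.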
 

	
 
    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (
            self.__class__.__name__, self.permission_id, self.permission_name
        )

    @classmethod
    def get_by_key(cls, key):
        return cls.query().filter(cls.permission_name == key).scalar()

    @classmethod
    def get_default_perms(cls, default_user_id):
        q = Session().query(UserRepoToPerm, Repository, cls)\
         .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
         .join((cls, UserRepoToPerm.permission_id == cls.permission_id))\
         .filter(UserRepoToPerm.user_id == default_user_id)

        return q.all()

    @classmethod
    def get_default_group_perms(cls, default_user_id):
        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls)\
         .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
         .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id))\
         .filter(UserRepoGroupToPerm.user_id == default_user_id)

        return q.all()
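    # Usage sketch (hypothetical, not part of the original changeset): both
    # helpers return (binding, target, Permission) tuples for the special
    # "default" user, e.g.
    #
    #     uid = User.get_default_user().user_id
    #     for _bind, repo, perm in Permission.get_default_perms(uid):
    #         print repo.repo_name, perm.permission_name
    #
    # assuming User.get_default_user() as defined elsewhere in this module.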
 

	
 

	
 
class UserRepoToPerm(Base, BaseModel):
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository, permission):
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n
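    # Usage sketch (hypothetical, not part of the original changeset): granting
    # a user write access to a repository could look like
    #
    #     perm = Permission.get_by_key('repository.write')
    #     UserRepoToPerm.create(user, repo, perm)
    #     Session().commit()
    #
    # create() only adds the new row to the session; committing is left to the
    # caller.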
 

	
 
    def __unicode__(self):
        return u'<user:%s => %s >' % (self.user, self.repository)


class UserToPerm(Base, BaseModel):
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    permission = relationship('Permission', lazy='joined')


class UserGroupRepoToPerm(Base, BaseModel):
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')

    @classmethod
    def create(cls, users_group, repository, permission):
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<userGroup:%s => %s >' % (self.users_group, self.repository)


class UserGroupToPerm(Base, BaseModel):
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')


class UserRepoGroupToPerm(Base, BaseModel):
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')


class UserGroupRepoGroupToPerm(Base, BaseModel):
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'group_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')


class Statistics(Base, BaseModel):
    __tablename__ = 'statistics'
    __table_args__ = (
         UniqueConstraint('repository_id'),
         {'extend_existing': True, 'mysql_engine': 'InnoDB',
          'mysql_charset': 'utf8'}
    )
    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data

    repository = relationship('Repository', single_parent=True)


class UserFollowing(Base, BaseModel):
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'}
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        return cls.query().filter(cls.follows_repo_id == repo_id)
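    # Usage note (editorial): get_repo_followers() returns a query rather than
    # a list, so callers can refine or count it cheaply, e.g.
    #
    #     UserFollowing.get_repo_followers(repo.repo_id).count()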
 

	
 

	
 
class CacheInvalidation(Base, BaseModel):
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )
    # cache_id, not used
    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # cache_key as created by _get_cache_key
    cache_key = Column("cache_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    # cache_args is usually a repo_name, possibly with _README/_RSS/_ATOM suffix
    cache_args = Column("cache_args", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    # instance sets cache_active True when it is caching, other instances set cache_active to False to invalidate
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args=''):
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (self.__class__.__name__,
                                   self.cache_id, self.cache_key)

    def get_prefix(self):
        """
        Guess the prefix that might have been used in _get_cache_key to generate self.cache_key.
        Only used for informational purposes in repo_edit.html.
        """
        _split = self.cache_key.split(self.cache_args, 1)
        if len(_split) == 2:
            return _split[0]
        return ''

    @classmethod
    def _get_cache_key(cls, key):
        """
        Wrapper for generating a unique cache key for this instance and "key".
        """
        import kallithea
        prefix = kallithea.CONFIG.get('instance_id', '')
        return "%s%s" % (prefix, key)
 

	
 
    @classmethod
    def _get_or_create_inv_obj(cls, key, repo_name, commit=True):
        inv_obj = Session().query(cls).filter(cls.cache_key == key).scalar()
        if not inv_obj:
            try:
                inv_obj = CacheInvalidation(key, repo_name)
                Session().add(inv_obj)
                if commit:
                    Session().commit()
            except Exception:
                log.error(traceback.format_exc())
                Session().rollback()
        return inv_obj

    @classmethod
    def invalidate(cls, key):
        """
        Returns an Invalidation object if the given key should be invalidated,
        None otherwise. `cache_active = False` means that this cache
        state is not valid and needs to be invalidated.

        :param key:
        """
        repo_name = key
        repo_name = remove_suffix(repo_name, '_README')
        repo_name = remove_suffix(repo_name, '_RSS')
        repo_name = remove_suffix(repo_name, '_ATOM')

        cache_key = cls._get_cache_key(key)
        inv = cls._get_or_create_inv_obj(cache_key, repo_name)

        if inv and not inv.cache_active:
            return inv
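    # Typical cycle (editorial sketch): a caller invokes invalidate(key); if an
    # object is returned the cached data is stale, so the caller regenerates it
    # and then calls set_valid() on the corresponding cache key to mark it
    # active again. Other instances later flip cache_active back to False via
    # set_invalidate() to force a refresh.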
 

	
 
    @classmethod
    def set_invalidate(cls, key=None, repo_name=None):
        """
        Mark a cache key for invalidation, either by key or as a whole
        cache set based on repo_name.

        :param key:
        :param repo_name:
        """
        invalidated_keys = []
        if key:
            assert not repo_name
            cache_key = cls._get_cache_key(key)
            inv_objs = Session().query(cls).filter(cls.cache_key == cache_key).all()
        else:
            assert repo_name
            inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()

        try:
            for inv_obj in inv_objs:
                inv_obj.cache_active = False
                log.debug('marking %s key for invalidation based on key=%s,repo_name=%s'
                  % (inv_obj, key, safe_str(repo_name)))
                invalidated_keys.append(inv_obj.cache_key)
                Session().add(inv_obj)
            Session().commit()
        except Exception:
            log.error(traceback.format_exc())
            Session().rollback()
        return invalidated_keys
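    # Usage sketch (hypothetical, not part of the original changeset): after a
    # push the whole repository is typically invalidated, e.g.
    #
    #     CacheInvalidation.set_invalidate(repo_name='some/repo')
    #
    # which appears to mark every entry registered with that repo_name as
    # cache_args (including the _README/_RSS/_ATOM variants) as stale.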
 

	
 
    @classmethod
    def set_valid(cls, key):
        """
        Mark this cache key as active and currently cached

        :param key:
        """
        inv_obj = cls.query().filter(cls.cache_key == key).scalar()
        inv_obj.cache_active = True
        Session().add(inv_obj)
        Session().commit()

    @classmethod
    def get_cache_map(cls):

        class cachemapdict(dict):

            def __init__(self, *args, **kwargs):
                self.fixkey = kwargs.pop('fixkey', False)
                super(cachemapdict, self).__init__(*args, **kwargs)

            def __getattr__(self, name):
                cache_key = name
                if self.fixkey:
                    cache_key = cls._get_cache_key(name)
                if cache_key in self.__dict__:
                    return self.__dict__[cache_key]
                else:
                    return self[cache_key]

            def __getitem__(self, name):
                cache_key = name
                if self.fixkey:
                    cache_key = cls._get_cache_key(name)
                try:
                    return super(cachemapdict, self).__getitem__(cache_key)
                except KeyError:
                    return None

        cache_map = cachemapdict(fixkey=True)
        for obj in cls.query().all():
            cache_map[obj.cache_key] = cachemapdict(obj.get_dict())

Changeset was too big and was cut off...

0 comments (0 inline, 0 general)