Changeset - aa1891074dd6
[Not reviewed]
default
Mads Kiilerich - 10 years ago 2015-10-08 23:21:58
madski@unity3d.com
cache: when invalidating a cache, always just delete all 'live cache' records instead of marking them inactive

Keep it simple. Adding the record again might be slightly more expensive than
just updating the active flag, but in return we get a simpler model and
automatic cleanup without having to use the cache-keys paster command.
5 files changed with 10 insertions and 15 deletions:
0 comments (0 inline, 0 general)
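
The idea behind this changeset is easiest to see at the model level: instead of flipping an active flag on existing 'live cache' records and relying on the cache-keys paster command to purge inactive rows later, invalidation simply deletes the matching records, and the next cache check recreates them. The following is a minimal, illustrative sketch of that pattern only; CacheRecord and invalidate are hypothetical names, not the actual Kallithea CacheInvalidation API, and the real query details differ.

# Illustrative sketch -- hypothetical names, not the Kallithea API.
from sqlalchemy import Column, Integer, String, Boolean, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()


class CacheRecord(Base):
    __tablename__ = 'cache_records'
    cache_id = Column(Integer, primary_key=True)
    cache_key = Column(String(255), nullable=False)
    cache_active = Column(Boolean, default=False)


def invalidate(session, cache_key):
    # Old model (what the changeset moves away from): keep the row and
    # mark it inactive, leaving cleanup to a separate paster command.
    #   session.query(CacheRecord) \
    #       .filter(CacheRecord.cache_key == cache_key) \
    #       .update({'cache_active': False})
    #
    # New model: delete the 'live cache' record outright; the next cache
    # check recreates it, so the table never accumulates dead rows.
    session.query(CacheRecord) \
        .filter(CacheRecord.cache_key == cache_key) \
        .delete()
    session.commit()


if __name__ == '__main__':
    engine = create_engine('sqlite://')          # in-memory database
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()
    session.add(CacheRecord(cache_key='repo1', cache_active=True))
    session.commit()
    invalidate(session, 'repo1')
    print(session.query(CacheRecord).count())    # -> 0

The trade-off named in the commit message is visible here: re-inserting a row on the next lookup may cost slightly more than an UPDATE of the flag, but the table only ever holds live records, so no separate cleanup step is needed.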
kallithea/controllers/admin/repos.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.admin.repos
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Repositories controller for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 7, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 
import traceback
 
import formencode
 
from formencode import htmlfill
 
from webob.exc import HTTPInternalServerError, HTTPForbidden, HTTPNotFound
 
from pylons import request, tmpl_context as c, url
 
from pylons.controllers.util import redirect
 
from pylons.i18n.translation import _
 
from sqlalchemy.sql.expression import func
 

	
 
from kallithea.lib import helpers as h
 
from kallithea.lib.auth import LoginRequired, \
 
    HasRepoPermissionAllDecorator, NotAnonymous, HasPermissionAny, \
 
    HasRepoPermissionAnyDecorator
 
from kallithea.lib.base import BaseRepoController, render
 
from kallithea.lib.utils import action_logger, jsonify
 
from kallithea.lib.vcs import RepositoryError
 
from kallithea.model.meta import Session
 
from kallithea.model.db import User, Repository, UserFollowing, RepoGroup,\
 
    Setting, RepositoryField
 
from kallithea.model.forms import RepoForm, RepoFieldForm, RepoPermsForm
 
from kallithea.model.scm import ScmModel, AvailableRepoGroupChoices, RepoList
 
from kallithea.model.repo import RepoModel
 
from kallithea.lib.compat import json
 
from kallithea.lib.exceptions import AttachedForksError
 
from kallithea.lib.utils2 import safe_int
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class ReposController(BaseRepoController):
 
    """
 
    REST Controller styled on the Atom Publishing Protocol"""
 
    # To properly map this controller, ensure your config/routing.py
 
    # file has a resource setup:
 
    #     map.resource('repo', 'repos')
 

	
 
    @LoginRequired()
 
    def __before__(self):
 
        super(ReposController, self).__before__()
 

	
 
    def _load_repo(self, repo_name):
 
        repo_obj = Repository.get_by_repo_name(repo_name)
 

	
 
        if repo_obj is None:
 
            h.not_mapped_error(repo_name)
 
            return redirect(url('repos'))
 

	
 
        return repo_obj
 

	
 
    def __load_defaults(self, repo=None):
 
        top_perms = ['hg.create.repository']
 
        repo_group_perms = ['group.admin']
 
        if HasPermissionAny('hg.create.write_on_repogroup.true')():
 
            repo_group_perms.append('group.write')
 
        extras = [] if repo is None else [repo.group]
 

	
 
        c.repo_groups = AvailableRepoGroupChoices(top_perms, repo_group_perms, extras)
 

	
 
        c.landing_revs_choices, c.landing_revs = ScmModel().get_repo_landing_revs(repo)
 

	
 
    def __load_data(self, repo_name=None):
 
        """
 
        Load defaults settings for edit, and update
 

	
 
        :param repo_name:
 
        """
 
        c.repo_info = self._load_repo(repo_name)
 
        self.__load_defaults(c.repo_info)
 

	
 
        defaults = RepoModel()._get_defaults(repo_name)
 
        defaults['clone_uri'] = c.repo_info.clone_uri_hidden # don't show password
 

	
 
        return defaults
 

	
 
    def index(self, format='html'):
 
        """GET /repos: All items in the collection"""
 
        # url('repos')
 
        _list = Repository.query()\
 
                        .order_by(func.lower(Repository.repo_name))\
 
                        .all()
 

	
 
        c.repos_list = RepoList(_list, perm_set=['repository.admin'])
 
        repos_data = RepoModel().get_repos_as_dict(repos_list=c.repos_list,
 
                                                   admin=True,
 
                                                   super_user_actions=True)
 
        #json used to render the grid
 
        c.data = json.dumps(repos_data)
 

	
 
        return render('admin/repos/repos.html')
 

	
 
    @NotAnonymous()
 
    def create(self):
 
        """
 
        POST /repos: Create a new item"""
 
        # url('repos')
 

	
 
        self.__load_defaults()
 
        form_result = {}
 
        task_id = None
 
        try:
 
            # CanWriteGroup validators checks permissions of this POST
 
            form_result = RepoForm(repo_groups=c.repo_groups,
 
                                   landing_revs=c.landing_revs_choices)()\
 
                            .to_python(dict(request.POST))
 

	
 
            # create is done sometimes async on celery, db transaction
 
            # management is handled there.
 
            task = RepoModel().create(form_result, self.authuser.user_id)
 
            from celery.result import BaseAsyncResult
 
            if isinstance(task, BaseAsyncResult):
 
                task_id = task.task_id
 
        except formencode.Invalid as errors:
 
            log.info(errors)
 
            return htmlfill.render(
 
                render('admin/repos/repo_add.html'),
 
                defaults=errors.value,
 
                errors=errors.error_dict or {},
 
                prefix_error=False,
 
                force_defaults=False,
 
                encoding="UTF-8")
 

	
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            msg = (_('Error creating repository %s')
 
                   % form_result.get('repo_name'))
 
            h.flash(msg, category='error')
 
            return redirect(url('home'))
 

	
 
        return redirect(h.url('repo_creating_home',
 
                              repo_name=form_result['repo_name_full'],
 
                              task_id=task_id))
 

	
 
    @NotAnonymous()
 
    def create_repository(self):
 
        """GET /_admin/create_repository: Form to create a new item"""
 
        self.__load_defaults()
 
        if not c.repo_groups:
 
            raise HTTPForbidden
 
        parent_group = request.GET.get('parent_group')
 

	
 
        ## apply the defaults from defaults page
 
        defaults = Setting.get_default_repo_settings(strip_prefix=True)
 
        if parent_group:
 
            prg = RepoGroup.get(parent_group)
 
            if prg is None or not any(rgc[0] == prg.group_id
 
                                      for rgc in c.repo_groups):
 
                raise HTTPForbidden
 
            defaults.update({'repo_group': parent_group})
 

	
 
        return htmlfill.render(
 
            render('admin/repos/repo_add.html'),
 
            defaults=defaults,
 
            errors={},
 
            prefix_error=False,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    def repo_creating(self, repo_name):
 
        c.repo = repo_name
 
        c.task_id = request.GET.get('task_id')
 
        if not c.repo:
 
            raise HTTPNotFound()
 
        return render('admin/repos/repo_creating.html')
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @jsonify
 
    def repo_check(self, repo_name):
 
        c.repo = repo_name
 
        task_id = request.GET.get('task_id')
 

	
 
        if task_id and task_id not in ['None']:
 
            from kallithea import CELERY_ON
 
            from celery.result import AsyncResult
 
            if CELERY_ON:
 
                task = AsyncResult(task_id)
 
                if task.failed():
 
                    raise HTTPInternalServerError(task.traceback)
 

	
 
        repo = Repository.get_by_repo_name(repo_name)
 
        if repo and repo.repo_state == Repository.STATE_CREATED:
 
            if repo.clone_uri:
 
                h.flash(_('Created repository %s from %s')
 
                        % (repo.repo_name, repo.clone_uri_hidden), category='success')
 
            else:
 
                repo_url = h.link_to(repo.repo_name,
 
                                     h.url('summary_home',
 
                                           repo_name=repo.repo_name))
 
                fork = repo.fork
 
                if fork is not None:
 
                    fork_name = fork.repo_name
 
                    h.flash(h.literal(_('Forked repository %s as %s')
 
                            % (fork_name, repo_url)), category='success')
 
                else:
 
                    h.flash(h.literal(_('Created repository %s') % repo_url),
 
                            category='success')
 
            return {'result': True}
 
        return {'result': False}
 

	
 
    @HasRepoPermissionAllDecorator('repository.admin')
 
    def update(self, repo_name):
 
        """
 
        PUT /repos/repo_name: Update an existing item"""
 
        # Forms posted to this method should contain a hidden field:
 
        #    <input type="hidden" name="_method" value="PUT" />
 
        # Or using helpers:
 
        #    h.form(url('put_repo', repo_name=ID),
 
        #           method='put')
 
        # url('put_repo', repo_name=ID)
 
        c.repo_info = self._load_repo(repo_name)
 
        self.__load_defaults(c.repo_info)
 
        c.active = 'settings'
 
        c.repo_fields = RepositoryField.query()\
 
            .filter(RepositoryField.repository == c.repo_info).all()
 

	
 
        repo_model = RepoModel()
 
        changed_name = repo_name
 
        repo = Repository.get_by_repo_name(repo_name)
 
        old_data = {
 
            'repo_name': repo_name,
 
            'repo_group': repo.group.get_dict() if repo.group else {},
 
            'repo_type': repo.repo_type,
 
        }
 
        _form = RepoForm(edit=True, old_data=old_data,
 
                         repo_groups=c.repo_groups,
 
                         landing_revs=c.landing_revs_choices)()
 

	
 
        try:
 
            form_result = _form.to_python(dict(request.POST))
 
            repo = repo_model.update(repo_name, **form_result)
 
            ScmModel().mark_for_invalidation(repo_name)
 
            h.flash(_('Repository %s updated successfully') % repo_name,
 
                    category='success')
 
            changed_name = repo.repo_name
 
            action_logger(self.authuser, 'admin_updated_repo',
 
                              changed_name, self.ip_addr, self.sa)
 
            Session().commit()
 
        except formencode.Invalid as errors:
 
            log.info(errors)
 
            defaults = self.__load_data(repo_name)
 
            defaults.update(errors.value)
 
            c.users_array = repo_model.get_users_js()
 
            return htmlfill.render(
 
                render('admin/repos/repo_edit.html'),
 
                defaults=defaults,
 
                errors=errors.error_dict or {},
 
                prefix_error=False,
 
                encoding="UTF-8",
 
                force_defaults=False)
 

	
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('Error occurred during update of repository %s') \
 
                    % repo_name, category='error')
 
        return redirect(url('edit_repo', repo_name=changed_name))
 

	
 
    @HasRepoPermissionAllDecorator('repository.admin')
 
    def delete(self, repo_name):
 
        """
 
        DELETE /repos/repo_name: Delete an existing item"""
 
        # Forms posted to this method should contain a hidden field:
 
        #    <input type="hidden" name="_method" value="DELETE" />
 
        # Or using helpers:
 
        #    h.form(url('delete_repo', repo_name=ID),
 
        #           method='delete')
 
        # url('delete_repo', repo_name=ID)
 

	
 
        repo_model = RepoModel()
 
        repo = repo_model.get_by_repo_name(repo_name)
 
        if not repo:
 
            h.not_mapped_error(repo_name)
 
            return redirect(url('repos'))
 
        try:
 
            _forks = repo.forks.count()
 
            handle_forks = None
 
            if _forks and request.POST.get('forks'):
 
                do = request.POST['forks']
 
                if do == 'detach_forks':
 
                    handle_forks = 'detach'
 
                    h.flash(_('Detached %s forks') % _forks, category='success')
 
                elif do == 'delete_forks':
 
                    handle_forks = 'delete'
 
                    h.flash(_('Deleted %s forks') % _forks, category='success')
 
            repo_model.delete(repo, forks=handle_forks)
 
            action_logger(self.authuser, 'admin_deleted_repo',
 
                  repo_name, self.ip_addr, self.sa)
 
            ScmModel().mark_for_invalidation(repo_name)
 
            h.flash(_('Deleted repository %s') % repo_name, category='success')
 
            Session().commit()
 
        except AttachedForksError:
 
            h.flash(_('Cannot delete repository %s which still has forks')
 
                        % repo_name, category='warning')
 

	
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('An error occurred during deletion of %s') % repo_name,
 
                    category='error')
 

	
 
        if repo.group:
 
            return redirect(url('repos_group_home', group_name=repo.group.group_name))
 
        return redirect(url('repos'))
 

	
 
    @HasRepoPermissionAllDecorator('repository.admin')
 
    def edit(self, repo_name):
 
        """GET /repo_name/settings: Form to edit an existing item"""
 
        # url('edit_repo', repo_name=ID)
 
        defaults = self.__load_data(repo_name)
 
        c.repo_fields = RepositoryField.query()\
 
            .filter(RepositoryField.repository == c.repo_info).all()
 
        repo_model = RepoModel()
 
        c.users_array = repo_model.get_users_js()
 
        c.active = 'settings'
 
        return htmlfill.render(
 
            render('admin/repos/repo_edit.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @HasRepoPermissionAllDecorator('repository.admin')
 
    def edit_permissions(self, repo_name):
 
        """GET /repo_name/settings: Form to edit an existing item"""
 
        # url('edit_repo', repo_name=ID)
 
        c.repo_info = self._load_repo(repo_name)
 
        repo_model = RepoModel()
 
        c.users_array = repo_model.get_users_js()
 
        c.user_groups_array = repo_model.get_user_groups_js()
 
        c.active = 'permissions'
 
        defaults = RepoModel()._get_defaults(repo_name)
 

	
 
        return htmlfill.render(
 
            render('admin/repos/repo_edit.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    def edit_permissions_update(self, repo_name):
 
        form = RepoPermsForm()().to_python(request.POST)
 
        RepoModel()._update_permissions(repo_name, form['perms_new'],
 
                                        form['perms_updates'])
 
        #TODO: implement this
 
        #action_logger(self.authuser, 'admin_changed_repo_permissions',
 
        #              repo_name, self.ip_addr, self.sa)
 
        Session().commit()
 
        h.flash(_('Repository permissions updated'), category='success')
 
        return redirect(url('edit_repo_perms', repo_name=repo_name))
 

	
 
    def edit_permissions_revoke(self, repo_name):
 
        try:
 
            obj_type = request.POST.get('obj_type')
 
            obj_id = None
 
            if obj_type == 'user':
 
                obj_id = safe_int(request.POST.get('user_id'))
 
            elif obj_type == 'user_group':
 
                obj_id = safe_int(request.POST.get('user_group_id'))
 

	
 
            if obj_type == 'user':
 
                RepoModel().revoke_user_permission(repo=repo_name, user=obj_id)
 
            elif obj_type == 'user_group':
 
                RepoModel().revoke_user_group_permission(
 
                    repo=repo_name, group_name=obj_id
 
                )
 
            #TODO: implement this
 
            #action_logger(self.authuser, 'admin_revoked_repo_permissions',
 
            #              repo_name, self.ip_addr, self.sa)
 
            Session().commit()
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('An error occurred during revoking of permission'),
 
                    category='error')
 
            raise HTTPInternalServerError()
 

	
 
    @HasRepoPermissionAllDecorator('repository.admin')
 
    def edit_fields(self, repo_name):
 
        """GET /repo_name/settings: Form to edit an existing item"""
 
        # url('edit_repo', repo_name=ID)
 
        c.repo_info = self._load_repo(repo_name)
 
        c.repo_fields = RepositoryField.query()\
 
            .filter(RepositoryField.repository == c.repo_info).all()
 
        c.active = 'fields'
 
        if request.POST:
 

	
 
            return redirect(url('repo_edit_fields'))
 
        return render('admin/repos/repo_edit.html')
 

	
 
    @HasRepoPermissionAllDecorator('repository.admin')
 
    def create_repo_field(self, repo_name):
 
        try:
 
            form_result = RepoFieldForm()().to_python(dict(request.POST))
 
            new_field = RepositoryField()
 
            new_field.repository = Repository.get_by_repo_name(repo_name)
 
            new_field.field_key = form_result['new_field_key']
 
            new_field.field_type = form_result['new_field_type']  # python type
 
            new_field.field_value = form_result['new_field_value']  # set initial blank value
 
            new_field.field_desc = form_result['new_field_desc']
 
            new_field.field_label = form_result['new_field_label']
 
            Session().add(new_field)
 
            Session().commit()
 
        except Exception as e:
 
            log.error(traceback.format_exc())
 
            msg = _('An error occurred during creation of field')
 
            if isinstance(e, formencode.Invalid):
 
                msg += ". " + e.msg
 
            h.flash(msg, category='error')
 
        return redirect(url('edit_repo_fields', repo_name=repo_name))
 

	
 
    @HasRepoPermissionAllDecorator('repository.admin')
 
    def delete_repo_field(self, repo_name, field_id):
 
        field = RepositoryField.get_or_404(field_id)
 
        try:
 
            Session().delete(field)
 
            Session().commit()
 
        except Exception as e:
 
            log.error(traceback.format_exc())
 
            msg = _('An error occurred during removal of field')
 
            h.flash(msg, category='error')
 
        return redirect(url('edit_repo_fields', repo_name=repo_name))
 

	
 
    @HasRepoPermissionAllDecorator('repository.admin')
 
    def edit_advanced(self, repo_name):
 
        """GET /repo_name/settings: Form to edit an existing item"""
 
        # url('edit_repo', repo_name=ID)
 
        c.repo_info = self._load_repo(repo_name)
 
        c.default_user_id = User.get_default_user().user_id
 
        c.in_public_journal = UserFollowing.query()\
 
            .filter(UserFollowing.user_id == c.default_user_id)\
 
            .filter(UserFollowing.follows_repository == c.repo_info).scalar()
 

	
 
        _repos = Repository.query().order_by(Repository.repo_name).all()
 
        read_access_repos = RepoList(_repos)
 
        c.repos_list = [(None, _('-- Not a fork --'))]
 
        c.repos_list += [(x.repo_id, x.repo_name)
 
                         for x in read_access_repos
 
                         if x.repo_id != c.repo_info.repo_id]
 

	
 
        defaults = {
 
            'id_fork_of': c.repo_info.fork.repo_id if c.repo_info.fork else ''
 
        }
 

	
 
        c.active = 'advanced'
 
        if request.POST:
 
            return redirect(url('repo_edit_advanced'))
 
        return htmlfill.render(
 
            render('admin/repos/repo_edit.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @HasRepoPermissionAllDecorator('repository.admin')
 
    def edit_advanced_journal(self, repo_name):
 
        """
 
        Sets this repository to be visible in public journal,
 
        in other words asking default user to follow this repo
 

	
 
        :param repo_name:
 
        """
 

	
 
        try:
 
            repo_id = Repository.get_by_repo_name(repo_name).repo_id
 
            user_id = User.get_default_user().user_id
 
            self.scm_model.toggle_following_repo(repo_id, user_id)
 
            h.flash(_('Updated repository visibility in public journal'),
 
                    category='success')
 
            Session().commit()
 
        except Exception:
 
            h.flash(_('An error occurred during setting this'
 
                      ' repository in public journal'),
 
                    category='error')
 
        return redirect(url('edit_repo_advanced', repo_name=repo_name))
 

	
 

	
 
    @HasRepoPermissionAllDecorator('repository.admin')
 
    def edit_advanced_fork(self, repo_name):
 
        """
 
        Mark given repository as a fork of another
 

	
 
        :param repo_name:
 
        """
 
        try:
 
            fork_id = request.POST.get('id_fork_of')
 
            repo = ScmModel().mark_as_fork(repo_name, fork_id,
 
                                           self.authuser.username)
 
            fork = repo.fork.repo_name if repo.fork else _('Nothing')
 
            Session().commit()
 
            h.flash(_('Marked repository %s as fork of %s') % (repo_name, fork),
 
                    category='success')
 
        except RepositoryError as e:
 
            log.error(traceback.format_exc())
 
            h.flash(str(e), category='error')
 
        except Exception as e:
 
            log.error(traceback.format_exc())
 
            h.flash(_('An error occurred during this operation'),
 
                    category='error')
 

	
 
        return redirect(url('edit_repo_advanced', repo_name=repo_name))
 

	
 
    @HasRepoPermissionAllDecorator('repository.admin')
 
    def edit_advanced_locking(self, repo_name):
 
        """
 
        Unlock repository when it is locked !
 

	
 
        :param repo_name:
 
        """
 
        try:
 
            repo = Repository.get_by_repo_name(repo_name)
 
            if request.POST.get('set_lock'):
 
                Repository.lock(repo, c.authuser.user_id)
 
                h.flash(_('Repository has been locked'), category='success')
 
            elif request.POST.get('set_unlock'):
 
                Repository.unlock(repo)
 
                h.flash(_('Repository has been unlocked'), category='success')
 
        except Exception as e:
 
            log.error(traceback.format_exc())
 
            h.flash(_('An error occurred during unlocking'),
 
                    category='error')
 
        return redirect(url('edit_repo_advanced', repo_name=repo_name))
 

	
 
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
 
    def toggle_locking(self, repo_name):
 
        """
 
        Toggle locking of repository by simple GET call to url
 

	
 
        :param repo_name:
 
        """
 

	
 
        try:
 
            repo = Repository.get_by_repo_name(repo_name)
 

	
 
            if repo.enable_locking:
 
                if repo.locked[0]:
 
                    Repository.unlock(repo)
 
                    h.flash(_('Repository has been unlocked'), category='success')
 
                else:
 
                    Repository.lock(repo, c.authuser.user_id)
 
                    h.flash(_('Repository has been locked'), category='success')
 

	
 
        except Exception as e:
 
            log.error(traceback.format_exc())
 
            h.flash(_('An error occurred during unlocking'),
 
                    category='error')
 
        return redirect(url('summary_home', repo_name=repo_name))
 

	
 
    @HasRepoPermissionAllDecorator('repository.admin')
 
    def edit_caches(self, repo_name):
 
        """GET /repo_name/settings: Form to edit an existing item"""
 
        # url('edit_repo', repo_name=ID)
 
        c.repo_info = self._load_repo(repo_name)
 
        c.active = 'caches'
 
        if request.POST:
 
            try:
 
                ScmModel().mark_for_invalidation(repo_name, delete=True)
 
                ScmModel().mark_for_invalidation(repo_name)
 
                Session().commit()
 
                h.flash(_('Cache invalidation successful'),
 
                        category='success')
 
            except Exception as e:
 
                log.error(traceback.format_exc())
 
                h.flash(_('An error occurred during cache invalidation'),
 
                        category='error')
 

	
 
            return redirect(url('edit_repo_caches', repo_name=c.repo_name))
 
        return render('admin/repos/repo_edit.html')
 

	
 
    @HasRepoPermissionAllDecorator('repository.admin')
 
    def edit_remote(self, repo_name):
 
        """GET /repo_name/settings: Form to edit an existing item"""
 
        # url('edit_repo', repo_name=ID)
 
        c.repo_info = self._load_repo(repo_name)
 
        c.active = 'remote'
 
        if request.POST:
 
            try:
 
                ScmModel().pull_changes(repo_name, self.authuser.username)
 
                h.flash(_('Pulled from remote location'), category='success')
 
            except Exception as e:
 
                log.error(traceback.format_exc())
 
                h.flash(_('An error occurred during pull from remote location'),
 
                        category='error')
 
            return redirect(url('edit_repo_remote', repo_name=c.repo_name))
 
        return render('admin/repos/repo_edit.html')
 

	
 
    @HasRepoPermissionAllDecorator('repository.admin')
 
    def edit_statistics(self, repo_name):
 
        """GET /repo_name/settings: Form to edit an existing item"""
 
        # url('edit_repo', repo_name=ID)
 
        c.repo_info = self._load_repo(repo_name)
 
        repo = c.repo_info.scm_instance
 

	
 
        if c.repo_info.stats:
 
            # this is on what revision we ended up so we add +1 for count
 
            last_rev = c.repo_info.stats.stat_on_revision + 1
 
        else:
 
            last_rev = 0
 
        c.stats_revision = last_rev
 

	
 
        c.repo_last_rev = repo.count() if repo.revisions else 0
 

	
 
        if last_rev == 0 or c.repo_last_rev == 0:
 
            c.stats_percentage = 0
 
        else:
 
            c.stats_percentage = '%.2f' % ((float((last_rev)) / c.repo_last_rev) * 100)
 

	
 
        c.active = 'statistics'
 
        if request.POST:
 
            try:
 
                RepoModel().delete_stats(repo_name)
 
                Session().commit()
 
            except Exception as e:
 
                log.error(traceback.format_exc())
 
                h.flash(_('An error occurred during deletion of repository stats'),
 
                        category='error')
 
            return redirect(url('edit_repo_statistics', repo_name=c.repo_name))
 

	
 
        return render('admin/repos/repo_edit.html')
kallithea/controllers/admin/settings.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.admin.settings
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
settings controller for Kallithea admin
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jul 14, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 
import traceback
 
import formencode
 

	
 
from formencode import htmlfill
 
from pylons import request, tmpl_context as c, url, config
 
from pylons.controllers.util import redirect
 
from pylons.i18n.translation import _
 

	
 
from kallithea.lib import helpers as h
 
from kallithea.lib.auth import LoginRequired, HasPermissionAllDecorator
 
from kallithea.lib.base import BaseController, render
 
from kallithea.lib.celerylib import tasks, run_task
 
from kallithea.lib.exceptions import HgsubversionImportError
 
from kallithea.lib.utils import repo2db_mapper, set_app_settings
 
from kallithea.model.db import Ui, Repository, Setting
 
from kallithea.model.forms import ApplicationSettingsForm, \
 
    ApplicationUiSettingsForm, ApplicationVisualisationForm
 
from kallithea.model.scm import ScmModel
 
from kallithea.model.notification import EmailNotificationModel
 
from kallithea.model.meta import Session
 
from kallithea.lib.utils2 import str2bool, safe_unicode
 
log = logging.getLogger(__name__)
 

	
 

	
 
class SettingsController(BaseController):
 
    """REST Controller styled on the Atom Publishing Protocol"""
 
    # To properly map this controller, ensure your config/routing.py
 
    # file has a resource setup:
 
    #     map.resource('setting', 'settings', controller='admin/settings',
 
    #         path_prefix='/admin', name_prefix='admin_')
 

	
 
    @LoginRequired()
 
    def __before__(self):
 
        super(SettingsController, self).__before__()
 

	
 
    def _get_hg_ui_settings(self):
 
        ret = Ui.query().all()
 

	
 
        if not ret:
 
            raise Exception('Could not get application ui settings !')
 
        settings = {}
 
        for each in ret:
 
            k = each.ui_key
 
            v = each.ui_value
 
            if k == '/':
 
                k = 'root_path'
 

	
 
            if k == 'push_ssl':
 
                v = str2bool(v)
 

	
 
            if k.find('.') != -1:
 
                k = k.replace('.', '_')
 

	
 
            if each.ui_section in ['hooks', 'extensions']:
 
                v = each.ui_active
 

	
 
            settings[each.ui_section + '_' + k] = v
 
        return settings
 

	
 
    @HasPermissionAllDecorator('hg.admin')
 
    def settings_vcs(self):
 
        """GET /admin/settings: All items in the collection"""
 
        # url('admin_settings')
 
        c.active = 'vcs'
 
        if request.POST:
 
            application_form = ApplicationUiSettingsForm()()
 
            try:
 
                form_result = application_form.to_python(dict(request.POST))
 
            except formencode.Invalid as errors:
 
                return htmlfill.render(
 
                     render('admin/settings/settings.html'),
 
                     defaults=errors.value,
 
                     errors=errors.error_dict or {},
 
                     prefix_error=False,
 
                     encoding="UTF-8",
 
                     force_defaults=False)
 

	
 
            try:
 
                sett = Ui.get_by_key('push_ssl')
 
                sett.ui_value = form_result['web_push_ssl']
 
                Session().add(sett)
 
                if c.visual.allow_repo_location_change:
 
                    sett = Ui.get_by_key('/')
 
                    sett.ui_value = form_result['paths_root_path']
 
                    Session().add(sett)
 

	
 
                #HOOKS
 
                sett = Ui.get_by_key(Ui.HOOK_UPDATE)
 
                sett.ui_active = form_result['hooks_changegroup_update']
 
                Session().add(sett)
 

	
 
                sett = Ui.get_by_key(Ui.HOOK_REPO_SIZE)
 
                sett.ui_active = form_result['hooks_changegroup_repo_size']
 
                Session().add(sett)
 

	
 
                sett = Ui.get_by_key(Ui.HOOK_PUSH)
 
                sett.ui_active = form_result['hooks_changegroup_push_logger']
 
                Session().add(sett)
 

	
 
                sett = Ui.get_by_key(Ui.HOOK_PULL)
 
                sett.ui_active = form_result['hooks_outgoing_pull_logger']
 

	
 
                Session().add(sett)
 

	
 
                ## EXTENSIONS
 
                sett = Ui.get_by_key('largefiles')
 
                if not sett:
 
                    #make one if it's not there !
 
                    sett = Ui()
 
                    sett.ui_key = 'largefiles'
 
                    sett.ui_section = 'extensions'
 
                sett.ui_active = form_result['extensions_largefiles']
 
                Session().add(sett)
 

	
 
                sett = Ui.get_by_key('hgsubversion')
 
                if not sett:
 
                    #make one if it's not there !
 
                    sett = Ui()
 
                    sett.ui_key = 'hgsubversion'
 
                    sett.ui_section = 'extensions'
 

	
 
                sett.ui_active = form_result['extensions_hgsubversion']
 
                if sett.ui_active:
 
                    try:
 
                        import hgsubversion  # pragma: no cover
 
                    except ImportError:
 
                        raise HgsubversionImportError
 
                Session().add(sett)
 

	
 
#                sett = Ui.get_by_key('hggit')
 
#                if not sett:
 
#                    #make one if it's not there !
 
#                    sett = Ui()
 
#                    sett.ui_key = 'hggit'
 
#                    sett.ui_section = 'extensions'
 
#
 
#                sett.ui_active = form_result['extensions_hggit']
 
#                Session().add(sett)
 

	
 
                Session().commit()
 

	
 
                h.flash(_('Updated VCS settings'), category='success')
 

	
 
            except HgsubversionImportError:
 
                log.error(traceback.format_exc())
 
                h.flash(_('Unable to activate hgsubversion support. '
 
                          'The "hgsubversion" library is missing'),
 
                        category='error')
 

	
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                h.flash(_('Error occurred while updating '
 
                          'application settings'), category='error')
 

	
 
        defaults = Setting.get_app_settings()
 
        defaults.update(self._get_hg_ui_settings())
 

	
 
        return htmlfill.render(
 
            render('admin/settings/settings.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @HasPermissionAllDecorator('hg.admin')
 
    def settings_mapping(self):
 
        """GET /admin/settings/mapping: All items in the collection"""
 
        # url('admin_settings_mapping')
 
        c.active = 'mapping'
 
        if request.POST:
 
            rm_obsolete = request.POST.get('destroy', False)
 
            install_git_hooks = request.POST.get('hooks', False)
 
            overwrite_git_hooks = request.POST.get('hooks_overwrite', False);
 
            invalidate_cache = request.POST.get('invalidate', False)
 
            log.debug('rescanning repo location with destroy obsolete=%s, '
 
                      'install git hooks=%s and '
 
                      'overwrite git hooks=%s' % (rm_obsolete, install_git_hooks, overwrite_git_hooks))
 

	
 
            if invalidate_cache:
 
                log.debug('invalidating all repositories cache')
 
                for repo in Repository.get_all():
 
                    ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
 
                    ScmModel().mark_for_invalidation(repo.repo_name)
 

	
 
            filesystem_repos = ScmModel().repo_scan()
 
            added, removed = repo2db_mapper(filesystem_repos, rm_obsolete,
 
                                            install_git_hooks=install_git_hooks,
 
                                            user=c.authuser.username,
 
                                            overwrite_git_hooks=overwrite_git_hooks)
 
            h.flash(h.literal(_('Repositories successfully rescanned. Added: %s. Removed: %s.') %
 
                (', '.join(h.link_to(safe_unicode(repo_name), h.url('summary_home', repo_name=repo_name))
 
                 for repo_name in added) or '-',
 
                 ', '.join(h.escape(safe_unicode(repo_name)) for repo_name in removed) or '-')),
 
                category='success')
 
            return redirect(url('admin_settings_mapping'))
 

	
 
        defaults = Setting.get_app_settings()
 
        defaults.update(self._get_hg_ui_settings())
 

	
 
        return htmlfill.render(
 
            render('admin/settings/settings.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @HasPermissionAllDecorator('hg.admin')
 
    def settings_global(self):
 
        """GET /admin/settings/global: All items in the collection"""
 
        # url('admin_settings_global')
 
        c.active = 'global'
 
        if request.POST:
 
            application_form = ApplicationSettingsForm()()
 
            try:
 
                form_result = application_form.to_python(dict(request.POST))
 
            except formencode.Invalid as errors:
 
                return htmlfill.render(
 
                    render('admin/settings/settings.html'),
 
                    defaults=errors.value,
 
                    errors=errors.error_dict or {},
 
                    prefix_error=False,
 
                    encoding="UTF-8",
 
                    force_defaults=False)
 

	
 
            try:
 
                sett1 = Setting.create_or_update('title',
 
                                            form_result['title'])
 
                Session().add(sett1)
 

	
 
                sett2 = Setting.create_or_update('realm',
 
                                            form_result['realm'])
 
                Session().add(sett2)
 

	
 
                sett3 = Setting.create_or_update('ga_code',
 
                                            form_result['ga_code'])
 
                Session().add(sett3)
 

	
 
                sett4 = Setting.create_or_update('captcha_public_key',
 
                                    form_result['captcha_public_key'])
 
                Session().add(sett4)
 

	
 
                sett5 = Setting.create_or_update('captcha_private_key',
 
                                    form_result['captcha_private_key'])
 
                Session().add(sett5)
 

	
 
                Session().commit()
 
                set_app_settings(config)
 
                h.flash(_('Updated application settings'), category='success')
 

	
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                h.flash(_('Error occurred while updating '
 
                          'application settings'),
 
                          category='error')
 

	
 
            return redirect(url('admin_settings_global'))
 

	
 
        defaults = Setting.get_app_settings()
 
        defaults.update(self._get_hg_ui_settings())
 

	
 
        return htmlfill.render(
 
            render('admin/settings/settings.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @HasPermissionAllDecorator('hg.admin')
 
    def settings_visual(self):
 
        """GET /admin/settings/visual: All items in the collection"""
 
        # url('admin_settings_visual')
 
        c.active = 'visual'
 
        if request.POST:
 
            application_form = ApplicationVisualisationForm()()
 
            try:
 
                form_result = application_form.to_python(dict(request.POST))
 
            except formencode.Invalid as errors:
 
                return htmlfill.render(
 
                    render('admin/settings/settings.html'),
 
                    defaults=errors.value,
 
                    errors=errors.error_dict or {},
 
                    prefix_error=False,
 
                    encoding="UTF-8",
 
                    force_defaults=False)
 

	
 
            try:
 
                settings = [
 
                    ('show_public_icon', 'show_public_icon', 'bool'),
 
                    ('show_private_icon', 'show_private_icon', 'bool'),
 
                    ('stylify_metatags', 'stylify_metatags', 'bool'),
 
                    ('repository_fields', 'repository_fields', 'bool'),
 
                    ('dashboard_items', 'dashboard_items', 'int'),
 
                    ('admin_grid_items', 'admin_grid_items', 'int'),
 
                    ('show_version', 'show_version', 'bool'),
 
                    ('use_gravatar', 'use_gravatar', 'bool'),
 
                    ('gravatar_url', 'gravatar_url', 'unicode'),
 
                    ('clone_uri_tmpl', 'clone_uri_tmpl', 'unicode'),
 
                ]
 
                for setting, form_key, type_ in settings:
 
                    sett = Setting.create_or_update(setting,
 
                                        form_result[form_key], type_)
 
                    Session().add(sett)
 

	
 
                Session().commit()
 
                set_app_settings(config)
 
                h.flash(_('Updated visualisation settings'),
 
                        category='success')
 

	
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                h.flash(_('Error occurred during updating '
 
                          'visualisation settings'),
 
                        category='error')
 

	
 
            return redirect(url('admin_settings_visual'))
 

	
 
        defaults = Setting.get_app_settings()
 
        defaults.update(self._get_hg_ui_settings())
 

	
 
        return htmlfill.render(
 
            render('admin/settings/settings.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @HasPermissionAllDecorator('hg.admin')
 
    def settings_email(self):
 
        """GET /admin/settings/email: All items in the collection"""
 
        # url('admin_settings_email')
 
        c.active = 'email'
 
        if request.POST:
 
            test_email = request.POST.get('test_email')
 
            test_email_subj = 'Kallithea test email'
 
            test_body = ('Kallithea Email test, '
 
                               'Kallithea version: %s' % c.kallithea_version)
 
            if not test_email:
 
                h.flash(_('Please enter email address'), category='error')
 
                return redirect(url('admin_settings_email'))
 

	
 
            test_email_txt_body = EmailNotificationModel()\
 
                .get_email_tmpl(EmailNotificationModel.TYPE_DEFAULT,
 
                                'txt', body=test_body)
 
            test_email_html_body = EmailNotificationModel()\
 
                .get_email_tmpl(EmailNotificationModel.TYPE_DEFAULT,
 
                                'html', body=test_body)
 

	
 
            recipients = [test_email] if test_email else None
 

	
 
            run_task(tasks.send_email, recipients, test_email_subj,
 
                     test_email_txt_body, test_email_html_body)
 

	
 
            h.flash(_('Send email task created'), category='success')
 
            return redirect(url('admin_settings_email'))
 

	
 
        defaults = Setting.get_app_settings()
 
        defaults.update(self._get_hg_ui_settings())
 

	
 
        import kallithea
 
        c.ini = kallithea.CONFIG
 

	
 
        return htmlfill.render(
 
            render('admin/settings/settings.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @HasPermissionAllDecorator('hg.admin')
 
    def settings_hooks(self):
 
        """GET /admin/settings/hooks: All items in the collection"""
 
        # url('admin_settings_hooks')
 
        c.active = 'hooks'
 
        if request.POST:
 
            if c.visual.allow_custom_hooks_settings:
 
                ui_key = request.POST.get('new_hook_ui_key')
 
                ui_value = request.POST.get('new_hook_ui_value')
 

	
 
                hook_id = request.POST.get('hook_id')
 

	
 
                try:
 
                    ui_key = ui_key and ui_key.strip()
 
                    if ui_value and ui_key:
 
                        Ui.create_or_update_hook(ui_key, ui_value)
 
                        h.flash(_('Added new hook'), category='success')
 
                    elif hook_id:
 
                        Ui.delete(hook_id)
 
                        Session().commit()
 

	
 
                    # check for edits
 
                    update = False
 
                    _d = request.POST.dict_of_lists()
 
                    for k, v in zip(_d.get('hook_ui_key', []),
 
                                    _d.get('hook_ui_value_new', [])):
 
                        Ui.create_or_update_hook(k, v)
 
                        update = True
 

	
 
                    if update:
 
                        h.flash(_('Updated hooks'), category='success')
 
                    Session().commit()
 
                except Exception:
 
                    log.error(traceback.format_exc())
 
                    h.flash(_('Error occurred during hook creation'),
 
                            category='error')
 

	
 
                return redirect(url('admin_settings_hooks'))
 

	
 
        defaults = Setting.get_app_settings()
 
        defaults.update(self._get_hg_ui_settings())
 

	
 
        c.hooks = Ui.get_builtin_hooks()
 
        c.custom_hooks = Ui.get_custom_hooks()
 

	
 
        return htmlfill.render(
 
            render('admin/settings/settings.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @HasPermissionAllDecorator('hg.admin')
 
    def settings_search(self):
 
        """GET /admin/settings/search: All items in the collection"""
 
        # url('admin_settings_search')
 
        c.active = 'search'
 
        if request.POST:
 
            repo_location = self._get_hg_ui_settings()['paths_root_path']
 
            full_index = request.POST.get('full_index', False)
 
            run_task(tasks.whoosh_index, repo_location, full_index)
 
            h.flash(_('Whoosh reindex task scheduled'), category='success')
 
            return redirect(url('admin_settings_search'))
 

	
 
        defaults = Setting.get_app_settings()
 
        defaults.update(self._get_hg_ui_settings())
 

	
 
        return htmlfill.render(
 
            render('admin/settings/settings.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @HasPermissionAllDecorator('hg.admin')
 
    def settings_system(self):
 
        """GET /admin/settings/system: All items in the collection"""
 
        # url('admin_settings_system')
 
        c.active = 'system'
 

	
 
        defaults = Setting.get_app_settings()
 
        defaults.update(self._get_hg_ui_settings())
 

	
 
        import kallithea
 
        c.ini = kallithea.CONFIG
 
        c.update_url = defaults.get('update_url')
 
        server_info = Setting.get_server_info()
 
        for key, val in server_info.iteritems():
 
            setattr(c, key, val)
 

	
 
        return htmlfill.render(
 
            render('admin/settings/settings.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @HasPermissionAllDecorator('hg.admin')
 
    def settings_system_update(self):
 
        """GET /admin/settings/system/updates: All items in the collection"""
 
        # url('admin_settings_system_update')
 
        import json
 
        import urllib2
 
        from kallithea.lib.verlib import NormalizedVersion
 
        from kallithea import __version__
 

	
 
        defaults = Setting.get_app_settings()
 
        defaults.update(self._get_hg_ui_settings())
 
        _update_url = defaults.get('update_url', '')
 
        _update_url = "" # FIXME: disabled
 

	
 
        _err = lambda s: '<div style="color:#ff8888; padding:4px 0px">%s</div>' % (s)
 
        try:
 
            import kallithea
 
            ver = kallithea.__version__
 
            log.debug('Checking for upgrade on `%s` server', _update_url)
 
            opener = urllib2.build_opener()
 
            opener.addheaders = [('User-agent', 'Kallithea-SCM/%s' % ver)]
 
            response = opener.open(_update_url)
 
            response_data = response.read()
 
            data = json.loads(response_data)
 
        except urllib2.URLError as e:
 
            log.error(traceback.format_exc())
 
            return _err('Failed to contact upgrade server: %r' % e)
 
        except ValueError as e:
 
            log.error(traceback.format_exc())
 
            return _err('Bad data sent from update server')
 

	
 
        latest = data['versions'][0]
 

	
 
        c.update_url = _update_url
 
        c.latest_data = latest
 
        c.latest_ver = latest['version']
 
        c.cur_ver = __version__
 
        c.should_upgrade = False
 

	
 
        if NormalizedVersion(c.latest_ver) > NormalizedVersion(c.cur_ver):
 
            c.should_upgrade = True
 
        c.important_notices = latest['general']
 

	
 
        return render('admin/settings/settings_system_update.html'),
kallithea/model/db.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.db
 
~~~~~~~~~~~~~~~~~~
 

	
 
Database Models for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 08, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import os
 
import time
 
import logging
 
import datetime
 
import traceback
 
import hashlib
 
import collections
 
import functools
 

	
 
from sqlalchemy import *
 
from sqlalchemy.ext.hybrid import hybrid_property
 
from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
 
from beaker.cache import cache_region, region_invalidate
 
from webob.exc import HTTPNotFound
 

	
 
from pylons.i18n.translation import lazy_ugettext as _
 

	
 
from kallithea import DB_PREFIX
 
from kallithea.lib.exceptions import DefaultUserException
 
from kallithea.lib.vcs import get_backend
 
from kallithea.lib.vcs.utils.helpers import get_scm
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 

	
 
from kallithea.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
 
    safe_unicode, remove_prefix, time_to_datetime, aslist, Optional, safe_int, \
 
    get_clone_url, urlreadable
 
from kallithea.lib.compat import json
 
from kallithea.lib.caching_query import FromCache
 

	
 
from kallithea.model.meta import Base, Session
 

	
 
URL_SEP = '/'
 
log = logging.getLogger(__name__)
 

	
 
#==============================================================================
 
# BASE CLASSES
 
#==============================================================================
 

	
 
_hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()
 

	
 

	
 
class BaseModel(object):
 
    """
 
    Base Model for all classes
 
    """
 

	
 
    @classmethod
 
    def _get_keys(cls):
 
        """return column names for this model """
 
        return class_mapper(cls).c.keys()
 

	
 
    def get_dict(self):
 
        """
 
        return dict with keys and values corresponding
 
        to this model data """
 

	
 
        d = {}
 
        for k in self._get_keys():
 
            d[k] = getattr(self, k)
 

	
 
        # also use __json__() if present to get additional fields
 
        _json_attr = getattr(self, '__json__', None)
 
        if _json_attr:
 
            # update with attributes from __json__
 
            if callable(_json_attr):
 
                _json_attr = _json_attr()
 
            for k, val in _json_attr.iteritems():
 
                d[k] = val
 
        return d
 

	
 
    def get_appstruct(self):
 
        """return list with keys and values tuples corresponding
 
        to this model data """
 

	
 
        l = []
 
        for k in self._get_keys():
 
            l.append((k, getattr(self, k),))
 
        return l
 

	
 
    def populate_obj(self, populate_dict):
 
        """populate model with data from given populate_dict"""
 

	
 
        for k in self._get_keys():
 
            if k in populate_dict:
 
                setattr(self, k, populate_dict[k])
 

	
 
    @classmethod
 
    def query(cls):
 
        return Session().query(cls)
 

	
 
    @classmethod
 
    def get(cls, id_):
 
        if id_:
 
            return cls.query().get(id_)
 

	
 
    @classmethod
 
    def get_or_404(cls, id_):
 
        try:
 
            id_ = int(id_)
 
        except (TypeError, ValueError):
 
            raise HTTPNotFound
 

	
 
        res = cls.query().get(id_)
 
        if res is None:
 
            raise HTTPNotFound
 
        return res
 

	
 
    @classmethod
 
    def getAll(cls):
 
        # deprecated and left for backward compatibility
 
        return cls.get_all()
 

	
 
    @classmethod
 
    def get_all(cls):
 
        return cls.query().all()
 

	
 
    @classmethod
 
    def delete(cls, id_):
 
        obj = cls.query().get(id_)
 
        Session().delete(obj)
 

	
 
    def __repr__(self):
 
        if hasattr(self, '__unicode__'):
 
            # python repr needs to return str
 
            try:
 
                return safe_str(self.__unicode__())
 
            except UnicodeDecodeError:
 
                pass
 
        return '<DB:%s>' % (self.__class__.__name__)
 

	
 

	
 
class Setting(Base, BaseModel):
 
    __tablename__ = DB_PREFIX + 'settings'
 

	
 
    __table_args__ = (
 
        UniqueConstraint('app_settings_name'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 

	
 
    SETTINGS_TYPES = {
 
        'str': safe_str,
 
        'int': safe_int,
 
        'unicode': safe_unicode,
 
        'bool': str2bool,
 
        'list': functools.partial(aslist, sep=',')
 
    }
 
    DEFAULT_UPDATE_URL = ''
 

	
 
    app_settings_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    app_settings_name = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    _app_settings_value = Column("app_settings_value", String(4096, convert_unicode=False), nullable=True, unique=None, default=None)
 
    _app_settings_type = Column("app_settings_type", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 

	
 
    def __init__(self, key='', val='', type='unicode'):
 
        self.app_settings_name = key
 
        self.app_settings_value = val
 
        self.app_settings_type = type
 

	
 
    @validates('_app_settings_value')
 
    def validate_settings_value(self, key, val):
 
        assert type(val) == unicode
 
        return val
 

	
 
    @hybrid_property
 
    def app_settings_value(self):
 
        v = self._app_settings_value
 
        _type = self.app_settings_type
 
        converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
 
        return converter(v)
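    # Illustrative example (not part of the original source; 'some_flag' is a made-up
    # key): values are stored as unicode and converted on read, so a row created as
    #     Setting('some_flag', True, 'bool')
    # stores u'True' but app_settings_value returns the Python bool True via str2bool.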
 

	
 
    @app_settings_value.setter
 
    def app_settings_value(self, val):
 
        """
 
        Setter that will always make sure we use unicode in app_settings_value
 

	
 
        :param val:
 
        """
 
        self._app_settings_value = safe_unicode(val)
 

	
 
    @hybrid_property
 
    def app_settings_type(self):
 
        return self._app_settings_type
 

	
 
    @app_settings_type.setter
 
    def app_settings_type(self, val):
 
        if val not in self.SETTINGS_TYPES:
 
            raise Exception('type must be one of %s, got %s'
 
                            % (self.SETTINGS_TYPES.keys(), val))
 
        self._app_settings_type = val
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s[%s]')>" % (
 
            self.__class__.__name__,
 
            self.app_settings_name, self.app_settings_value, self.app_settings_type
 
        )
 

	
 
    @classmethod
 
    def get_by_name(cls, key):
 
        return cls.query()\
 
            .filter(cls.app_settings_name == key).scalar()
 

	
 
    @classmethod
 
    def get_by_name_or_create(cls, key, val='', type='unicode'):
 
        res = cls.get_by_name(key)
 
        if res is None:
 
            res = cls(key, val, type)
 
        return res
 

	
 
    @classmethod
 
    def create_or_update(cls, key, val=Optional(''), type=Optional('unicode')):
 
        """
 
        Creates or updates a Kallithea setting. When updating, only explicitly
 
        set parameters are changed; arguments left as Optional are skipped.
 

	
 
        :param key:
 
        :param val:
 
        :param type:
 
        :return:
 
        """
 
        res = cls.get_by_name(key)
 
        if res is None:
 
            val = Optional.extract(val)
 
            type = Optional.extract(type)
 
            res = cls(key, val, type)
 
        else:
 
            res.app_settings_name = key
 
            if not isinstance(val, Optional):
 
                # update if set
 
                res.app_settings_value = val
 
            if not isinstance(type, Optional):
 
                # update if set
 
                res.app_settings_type = type
 
        return res
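    # Illustrative usage sketch (not part of the original source): the Optional
    # defaults mean existing values are only overwritten when a real argument is passed.
    #
    #     res = Setting.create_or_update('title', u'My site')  # create, or update the value
    #     res = Setting.create_or_update('title')              # existing value stays untouched
    #     Session().add(res)                                    # caller persists new rows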
 

	
 
    @classmethod
 
    def get_app_settings(cls, cache=False):
 

	
 
        ret = cls.query()
 

	
 
        if cache:
 
            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
 

	
 
        if ret is None:
 
            raise Exception('Could not get application settings !')
 
        settings = {}
 
        for each in ret:
 
            settings[each.app_settings_name] = \
 
                each.app_settings_value
 

	
 
        return settings
 

	
 
    @classmethod
 
    def get_auth_plugins(cls, cache=False):
 
        auth_plugins = cls.get_by_name("auth_plugins").app_settings_value
 
        return auth_plugins
 

	
 
    @classmethod
 
    def get_auth_settings(cls, cache=False):
 
        ret = cls.query()\
 
                .filter(cls.app_settings_name.startswith('auth_')).all()
 
        fd = {}
 
        for row in ret:
 
            fd[row.app_settings_name] = row.app_settings_value
 
        return fd
 

	
 
    @classmethod
 
    def get_default_repo_settings(cls, cache=False, strip_prefix=False):
 
        ret = cls.query()\
 
                .filter(cls.app_settings_name.startswith('default_')).all()
 
        fd = {}
 
        for row in ret:
 
            key = row.app_settings_name
 
            if strip_prefix:
 
                key = remove_prefix(key, prefix='default_')
 
            fd.update({key: row.app_settings_value})
 

	
 
        return fd
 

	
 
    @classmethod
 
    def get_server_info(cls):
 
        import pkg_resources
 
        import platform
 
        import kallithea
 
        from kallithea.lib.utils import check_git_version
 
        mods = [(p.project_name, p.version) for p in pkg_resources.working_set]
 
        info = {
 
            'modules': sorted(mods, key=lambda k: k[0].lower()),
 
            'py_version': platform.python_version(),
 
            'platform': safe_unicode(platform.platform()),
 
            'kallithea_version': kallithea.__version__,
 
            'git_version': safe_unicode(check_git_version()),
 
            'git_path': kallithea.CONFIG.get('git_path')
 
        }
 
        return info
 

	
 

	
 
class Ui(Base, BaseModel):
 
    __tablename__ = DB_PREFIX + 'ui'
 
    __table_args__ = (
 
        UniqueConstraint('ui_key'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 

	
 
    HOOK_UPDATE = 'changegroup.update'
 
    HOOK_REPO_SIZE = 'changegroup.repo_size'
 
    HOOK_PUSH = 'changegroup.push_logger'
 
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
 
    HOOK_PULL = 'outgoing.pull_logger'
 
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
 

	
 
    ui_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    ui_section = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    ui_key = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    ui_value = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    ui_active = Column(Boolean(), nullable=True, unique=None, default=True)
 

	
 
    # def __init__(self, section='', key='', value=''):
 
    #     self.ui_section = section
 
    #     self.ui_key = key
 
    #     self.ui_value = value
 

	
 
    @classmethod
 
    def get_by_key(cls, key):
 
        return cls.query().filter(cls.ui_key == key).scalar()
 

	
 
    @classmethod
 
    def get_builtin_hooks(cls):
 
        q = cls.query()
 
        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
 
                                     cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
 
                                     cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
 
        return q.all()
 

	
 
    @classmethod
 
    def get_custom_hooks(cls):
 
        q = cls.query()
 
        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
 
                                      cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
 
                                      cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
 
        q = q.filter(cls.ui_section == 'hooks')
 
        return q.all()
 

	
 
    @classmethod
 
    def get_repos_location(cls):
 
        return cls.get_by_key('/').ui_value
 

	
 
    @classmethod
 
    def create_or_update_hook(cls, key, val):
 
        new_ui = cls.get_by_key(key) or cls()
 
        new_ui.ui_section = 'hooks'
 
        new_ui.ui_active = True
 
        new_ui.ui_key = key
 
        new_ui.ui_value = val
 

	
 
        Session().add(new_ui)
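    # Illustrative usage (not part of the original source; the hook value is only an
    # example): register or update a hook in the 'hooks' ui section; committing is
    # left to the caller.
    #
    #     Ui.create_or_update_hook(Ui.HOOK_PUSH,
    #                              'python:kallithea.lib.hooks.log_push_action')
    #     Session().commit()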
 

	
 
    def __repr__(self):
 
        return '<%s[%s]%s=>%s>' % (self.__class__.__name__, self.ui_section,
 
                                    self.ui_key, self.ui_value)
 

	
 

	
 
class User(Base, BaseModel):
 
    __tablename__ = 'users'
 
    __table_args__ = (
 
        UniqueConstraint('username'), UniqueConstraint('email'),
 
        Index('u_username_idx', 'username'),
 
        Index('u_email_idx', 'email'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    DEFAULT_USER = 'default'
 
    DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
 

	
 
    user_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    username = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    password = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    active = Column(Boolean(), nullable=True, unique=None, default=True)
 
    admin = Column(Boolean(), nullable=True, unique=None, default=False)
 
    name = Column("firstname", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    lastname = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    _email = Column("email", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    last_login = Column(DateTime(timezone=False), nullable=True, unique=None, default=None)
 
    extern_type = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    extern_name = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    api_key = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    inherit_default_permissions = Column(Boolean(), nullable=False, unique=None, default=True)
 
    created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    _user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data
 

	
 
    user_log = relationship('UserLog')
 
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
 

	
 
    repositories = relationship('Repository')
 
    repo_groups = relationship('RepoGroup')
 
    user_groups = relationship('UserGroup')
 
    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
 
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
 

	
 
    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
 
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
 

	
 
    group_member = relationship('UserGroupMember', cascade='all')
 

	
 
    notifications = relationship('UserNotification', cascade='all')
 
    # notifications assigned to this user
 
    user_created_notifications = relationship('Notification', cascade='all')
 
    # comments created by this user
 
    user_comments = relationship('ChangesetComment', cascade='all')
 
    #extra emails for this user
 
    user_emails = relationship('UserEmailMap', cascade='all')
 
    #extra API keys
 
    user_api_keys = relationship('UserApiKeys', cascade='all')
 

	
 

	
 
    @hybrid_property
 
    def email(self):
 
        return self._email
 

	
 
    @email.setter
 
    def email(self, val):
 
        self._email = val.lower() if val else None
 

	
 
    @property
 
    def firstname(self):
 
        # alias for future
 
        return self.name
 

	
 
    @property
 
    def emails(self):
 
        other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
 
        return [self.email] + [x.email for x in other]
 

	
 
    @property
 
    def api_keys(self):
 
        other = UserApiKeys.query().filter(UserApiKeys.user==self).all()
 
        return [self.api_key] + [x.api_key for x in other]
 

	
 
    @property
 
    def ip_addresses(self):
 
        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
 
        return [x.ip_addr for x in ret]
 

	
 
    @property
 
    def full_name(self):
 
        return '%s %s' % (self.firstname, self.lastname)
 

	
 
    @property
 
    def full_name_or_username(self):
 
        """
 
        Show full name.
 
        If full name is not set, fall back to username.
 
        """
 
        return ('%s %s' % (self.firstname, self.lastname)
 
                if (self.firstname and self.lastname) else self.username)
 

	
 
    @property
 
    def full_name_and_username(self):
 
        """
 
        Show full name and username as 'Firstname Lastname (username)'.
 
        If full name is not set, fall back to username.
 
        """
 
        return ('%s %s (%s)' % (self.firstname, self.lastname, self.username)
 
                if (self.firstname and self.lastname) else self.username)
 

	
 
    @property
 
    def full_contact(self):
 
        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
 

	
 
    @property
 
    def short_contact(self):
 
        return '%s %s' % (self.firstname, self.lastname)
 

	
 
    @property
 
    def is_admin(self):
 
        return self.admin
 

	
 
    @property
 
    def AuthUser(self):
 
        """
 
        Returns instance of AuthUser for this user
 
        """
 
        from kallithea.lib.auth import AuthUser
 
        return AuthUser(dbuser=self)
 

	
 
    @hybrid_property
 
    def user_data(self):
 
        if not self._user_data:
 
            return {}
 

	
 
        try:
 
            return json.loads(self._user_data)
 
        except TypeError:
 
            return {}
 

	
 
    @user_data.setter
 
    def user_data(self, val):
 
        try:
 
            self._user_data = json.dumps(val)
 
        except Exception:
 
            log.error(traceback.format_exc())
 

	
 
    def __unicode__(self):
 
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
 
                                      self.user_id, self.username)
 

	
 
    @classmethod
 
    def get_or_404(cls, id_, allow_default=True):
 
        '''
 
        Overridden version of BaseModel.get_or_404, with an extra check on
 
        the default user.
 
        '''
 
        user = super(User, cls).get_or_404(id_)
 
        if not allow_default:
 
            if user.username == User.DEFAULT_USER:
 
                raise DefaultUserException
 
        return user
 

	
 
    @classmethod
 
    def get_by_username(cls, username, case_insensitive=False, cache=False):
 
        if case_insensitive:
 
            q = cls.query().filter(cls.username.ilike(username))
 
        else:
 
            q = cls.query().filter(cls.username == username)
 

	
 
        if cache:
 
            q = q.options(FromCache(
 
                            "sql_cache_short",
 
                            "get_user_%s" % _hash_key(username)
 
                          )
 
            )
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_api_key(cls, api_key, cache=False, fallback=True):
 
        if len(api_key) != 40 or not api_key.isalnum():
 
            return None
 

	
 
        q = cls.query().filter(cls.api_key == api_key)
 

	
 
        if cache:
 
            q = q.options(FromCache("sql_cache_short",
 
                                    "get_api_key_%s" % api_key))
 
        res = q.scalar()
 

	
 
        if fallback and not res:
 
            #fallback to additional keys
 
            _res = UserApiKeys.query()\
 
                .filter(UserApiKeys.api_key == api_key)\
 
                .filter(or_(UserApiKeys.expires == -1,
 
                            UserApiKeys.expires >= time.time()))\
 
                .first()
 
            if _res:
 
                res = _res.user
 
        return res
 

	
 
    @classmethod
 
    def get_by_email(cls, email, case_insensitive=False, cache=False):
 
        if case_insensitive:
 
            q = cls.query().filter(cls.email.ilike(email))
 
        else:
 
            q = cls.query().filter(cls.email == email)
 

	
 
        if cache:
 
            q = q.options(FromCache("sql_cache_short",
 
                                    "get_email_key_%s" % email))
 

	
 
        ret = q.scalar()
 
        if ret is None:
 
            q = UserEmailMap.query()
 
            # try fetching in alternate email map
 
            if case_insensitive:
 
                q = q.filter(UserEmailMap.email.ilike(email))
 
            else:
 
                q = q.filter(UserEmailMap.email == email)
 
            q = q.options(joinedload(UserEmailMap.user))
 
            if cache:
 
                q = q.options(FromCache("sql_cache_short",
 
                                        "get_email_map_key_%s" % email))
 
            ret = getattr(q.scalar(), 'user', None)
 

	
 
        return ret
 

	
 
    @classmethod
 
    def get_from_cs_author(cls, author):
 
        """
 
        Tries to get User objects out of commit author string
 

	
 
        :param author:
 
        """
 
        from kallithea.lib.helpers import email, author_name
 
        # If the author string contains a valid email, see if that user is in the system
 
        _email = email(author)
 
        if _email:
 
            user = cls.get_by_email(_email, case_insensitive=True)
 
            if user is not None:
 
                return user
 
        # Maybe we can match by username?
 
        _author = author_name(author)
 
        user = cls.get_by_username(_author, case_insensitive=True)
 
        if user is not None:
 
            return user
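    # Illustrative example (not part of the original source): matching is attempted
    # first on the email embedded in the author string, then on the author name.
    #
    #     User.get_from_cs_author(u'John Doe <john@example.com>')
    #     # -> the user owning that email, else a user named 'John Doe', else None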
 

	
 
    def update_lastlogin(self):
 
        """Update user lastlogin"""
 
        self.last_login = datetime.datetime.now()
 
        Session().add(self)
 
        log.debug('updated user %s lastlogin', self.username)
 

	
 
    @classmethod
 
    def get_first_admin(cls):
 
        user = User.query().filter(User.admin == True).first()
 
        if user is None:
 
            raise Exception('Missing administrative account!')
 
        return user
 

	
 
    @classmethod
 
    def get_default_user(cls, cache=False):
 
        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
 
        if user is None:
 
            raise Exception('Missing default account!')
 
        return user
 

	
 
    def get_api_data(self, details=False):
 
        """
 
        Common function for generating user related data for API
 
        """
 
        user = self
 
        data = dict(
 
            user_id=user.user_id,
 
            username=user.username,
 
            firstname=user.name,
 
            lastname=user.lastname,
 
            email=user.email,
 
            emails=user.emails,
 
            active=user.active,
 
            admin=user.admin,
 
        )
 
        if details:
 
            data.update(dict(
 
                extern_type=user.extern_type,
 
                extern_name=user.extern_name,
 
                api_key=user.api_key,
 
                api_keys=user.api_keys,
 
                last_login=user.last_login,
 
                ip_addresses=user.ip_addresses
 
                ))
 
        return data
 

	
 
    def __json__(self):
 
        data = dict(
 
            full_name=self.full_name,
 
            full_name_or_username=self.full_name_or_username,
 
            short_contact=self.short_contact,
 
            full_contact=self.full_contact
 
        )
 
        data.update(self.get_api_data())
 
        return data
 

	
 

	
 
class UserApiKeys(Base, BaseModel):
 
    __tablename__ = 'user_api_keys'
 
    __table_args__ = (
 
        Index('uak_api_key_idx', 'api_key'),
 
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
 
        UniqueConstraint('api_key'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    __mapper_args__ = {}
 

	
 
    user_api_key_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    api_key = Column(String(255, convert_unicode=False), nullable=False, unique=True)
 
    description = Column(UnicodeText(1024))
 
    expires = Column(Float(53), nullable=False)
 
    created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    user = relationship('User')
 

	
 
    @property
 
    def expired(self):
 
        if self.expires == -1:
 
            return False
 
        return time.time() > self.expires
 

	
 

	
 
class UserEmailMap(Base, BaseModel):
 
    __tablename__ = 'user_email_map'
 
    __table_args__ = (
 
        Index('uem_email_idx', 'email'),
 
        UniqueConstraint('email'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    __mapper_args__ = {}
 

	
 
    email_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    _email = Column("email", String(255, convert_unicode=False), nullable=True, unique=False, default=None)
 
    user = relationship('User')
 

	
 
    @validates('_email')
 
    def validate_email(self, key, email):
 
        # check if this email is not main one
 
        main_email = Session().query(User).filter(User.email == email).scalar()
 
        if main_email is not None:
 
            raise AttributeError('email %s is already present in the user table' % email)
 
        return email
 

	
 
    @hybrid_property
 
    def email(self):
 
        return self._email
 

	
 
    @email.setter
 
    def email(self, val):
 
        self._email = val.lower() if val else None
 

	
 

	
 
class UserIpMap(Base, BaseModel):
 
    __tablename__ = 'user_ip_map'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'ip_addr'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    __mapper_args__ = {}
 

	
 
    ip_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    ip_addr = Column(String(255, convert_unicode=False), nullable=True, unique=False, default=None)
 
    active = Column(Boolean(), nullable=True, unique=None, default=True)
 
    user = relationship('User')
 

	
 
    @classmethod
 
    def _get_ip_range(cls, ip_addr):
 
        from kallithea.lib import ipaddr
 
        net = ipaddr.IPNetwork(address=ip_addr)
 
        return [str(net.network), str(net.broadcast)]
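    # Illustrative example (not part of the original source): a CIDR entry expands to
    # its network/broadcast bounds, while a single address yields itself twice.
    #
    #     UserIpMap._get_ip_range('192.168.1.0/24')  # -> ['192.168.1.0', '192.168.1.255']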
 

	
 
    def __json__(self):
 
        return dict(
 
          ip_addr=self.ip_addr,
 
          ip_range=self._get_ip_range(self.ip_addr)
 
        )
 

	
 
    def __unicode__(self):
 
        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
 
                                            self.user_id, self.ip_addr)
 

	
 
class UserLog(Base, BaseModel):
 
    __tablename__ = 'user_logs'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    user_log_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    username = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    repository_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=True)
 
    repository_name = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    user_ip = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    action = Column(UnicodeText(1200000, convert_unicode=False), nullable=True, unique=None, default=None)
 
    action_date = Column(DateTime(timezone=False), nullable=True, unique=None, default=None)
 

	
 
    def __unicode__(self):
 
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
 
                                      self.repository_name,
 
                                      self.action)
 

	
 
    @property
 
    def action_as_day(self):
 
        return datetime.date(*self.action_date.timetuple()[:3])
 

	
 
    user = relationship('User')
 
    repository = relationship('Repository', cascade='')
 

	
 

	
 
class UserGroup(Base, BaseModel):
 
    __tablename__ = 'users_groups'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    users_group_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    users_group_name = Column(String(255, convert_unicode=False), nullable=False, unique=True, default=None)
 
    user_group_description = Column(String(10000, convert_unicode=False), nullable=True, unique=None, default=None)
 
    users_group_active = Column(Boolean(), nullable=True, unique=None, default=None)
 
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
 
    created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data
 

	
 
    members = relationship('UserGroupMember', cascade="all, delete-orphan")
 
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
 
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
 
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
 
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
 
    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
 

	
 
    user = relationship('User')
 

	
 
    @hybrid_property
 
    def group_data(self):
 
        if not self._group_data:
 
            return {}
 

	
 
        try:
 
            return json.loads(self._group_data)
 
        except TypeError:
 
            return {}
 

	
 
    @group_data.setter
 
    def group_data(self, val):
 
        try:
 
            self._group_data = json.dumps(val)
 
        except Exception:
 
            log.error(traceback.format_exc())
 

	
 
    def __unicode__(self):
 
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
 
                                      self.users_group_id,
 
                                      self.users_group_name)
 

	
 
    @classmethod
 
    def get_by_group_name(cls, group_name, cache=False,
 
                          case_insensitive=False):
 
        if case_insensitive:
 
            q = cls.query().filter(cls.users_group_name.ilike(group_name))
 
        else:
 
            q = cls.query().filter(cls.users_group_name == group_name)
 
        if cache:
 
            q = q.options(FromCache(
 
                            "sql_cache_short",
 
                            "get_group_%s" % _hash_key(group_name)
 
                          )
 
            )
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get(cls, user_group_id, cache=False):
 
        user_group = cls.query()
 
        if cache:
 
            user_group = user_group.options(FromCache("sql_cache_short",
 
                                    "get_users_group_%s" % user_group_id))
 
        return user_group.get(user_group_id)
 

	
 
    def get_api_data(self, with_members=True):
 
        user_group = self
 

	
 
        data = dict(
 
            users_group_id=user_group.users_group_id,
 
            group_name=user_group.users_group_name,
 
            group_description=user_group.user_group_description,
 
            active=user_group.users_group_active,
 
            owner=user_group.user.username,
 
        )
 
        if with_members:
 
            members = []
 
            for user in user_group.members:
 
                user = user.user
 
                members.append(user.get_api_data())
 
            data['members'] = members
 

	
 
        return data
 

	
 

	
 
class UserGroupMember(Base, BaseModel):
 
    __tablename__ = 'users_groups_members'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    users_group_member_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    users_group_id = Column(Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User')
 
    users_group = relationship('UserGroup')
 

	
 
    def __init__(self, gr_id='', u_id=''):
 
        self.users_group_id = gr_id
 
        self.user_id = u_id
 

	
 

	
 
class RepositoryField(Base, BaseModel):
 
    __tablename__ = 'repositories_fields'
 
    __table_args__ = (
 
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields
 

	
 
    repo_field_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    repository_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
 
    field_key = Column(String(250, convert_unicode=False))
 
    field_label = Column(String(1024, convert_unicode=False), nullable=False)
 
    field_value = Column(String(10000, convert_unicode=False), nullable=False)
 
    field_desc = Column(String(1024, convert_unicode=False), nullable=False)
 
    field_type = Column(String(255), nullable=False, unique=None)
 
    created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    repository = relationship('Repository')
 

	
 
    @property
 
    def field_key_prefixed(self):
 
        return 'ex_%s' % self.field_key
 

	
 
    @classmethod
 
    def un_prefix_key(cls, key):
 
        if key.startswith(cls.PREFIX):
 
            return key[len(cls.PREFIX):]
 
        return key
 

	
 
    @classmethod
 
    def get_by_key_name(cls, key, repo):
 
        row = cls.query()\
 
                .filter(cls.repository == repo)\
 
                .filter(cls.field_key == key).scalar()
 
        return row
 

	
 

	
 
class Repository(Base, BaseModel):
 
    __tablename__ = 'repositories'
 
    __table_args__ = (
 
        UniqueConstraint('repo_name'),
 
        Index('r_repo_name_idx', 'repo_name'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
 
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
 

	
 
    STATE_CREATED = 'repo_state_created'
 
    STATE_PENDING = 'repo_state_pending'
 
    STATE_ERROR = 'repo_state_error'
 

	
 
    repo_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    repo_name = Column(String(255, convert_unicode=False), nullable=False, unique=True, default=None)
 
    repo_state = Column(String(255), nullable=True)
 

	
 
    clone_uri = Column(String(255, convert_unicode=False), nullable=True, unique=False, default=None)
 
    repo_type = Column(String(255, convert_unicode=False), nullable=False, unique=False, default=None)
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
 
    private = Column(Boolean(), nullable=True, unique=None, default=None)
 
    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
 
    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
 
    description = Column(String(10000, convert_unicode=False), nullable=True, unique=None, default=None)
 
    created_on = Column(DateTime(timezone=False), nullable=False, unique=None, default=datetime.datetime.now)
 
    updated_on = Column(DateTime(timezone=False), nullable=False, unique=None, default=datetime.datetime.now)
 
    _landing_revision = Column("landing_revision", String(255, convert_unicode=False), nullable=False, unique=False, default=None)
 
    enable_locking = Column(Boolean(), nullable=False, unique=None, default=False)
 
    _locked = Column("locked", String(255, convert_unicode=False), nullable=True, unique=False, default=None)
 
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) #JSON data
 

	
 
    fork_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
 
    group_id = Column(Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
 

	
 
    user = relationship('User')
 
    fork = relationship('Repository', remote_side=repo_id)
 
    group = relationship('RepoGroup')
 
    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
 
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
 
    stats = relationship('Statistics', cascade='all', uselist=False)
 

	
 
    followers = relationship('UserFollowing',
 
                             primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
 
                             cascade='all')
 
    extra_fields = relationship('RepositoryField',
 
                                cascade="all, delete-orphan")
 

	
 
    logs = relationship('UserLog')
 
    comments = relationship('ChangesetComment', cascade="all, delete-orphan")
 

	
 
    pull_requests_org = relationship('PullRequest',
 
                    primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
 
                    cascade="all, delete-orphan")
 

	
 
    pull_requests_other = relationship('PullRequest',
 
                    primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
 
                    cascade="all, delete-orphan")
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
 
                                   safe_unicode(self.repo_name))
 

	
 
    @hybrid_property
 
    def landing_rev(self):
 
        # always should return [rev_type, rev]
 
        if self._landing_revision:
 
            _rev_info = self._landing_revision.split(':')
 
            if len(_rev_info) < 2:
 
                _rev_info.insert(0, 'rev')
 
            return [_rev_info[0], _rev_info[1]]
 
        return [None, None]
 

	
 
    @landing_rev.setter
 
    def landing_rev(self, val):
 
        if ':' not in val:
 
            raise ValueError('value must be delimited with `:` and consist '
 
                             'of <rev_type>:<rev>, got %s instead' % val)
 
        self._landing_revision = val
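    # Illustrative example (not part of the original source): the value is stored as
    # '<rev_type>:<rev>' and read back as a two-element list.
    #
    #     repo.landing_rev = 'branch:default'
    #     repo.landing_rev        # -> ['branch', 'default']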
 

	
 
    @hybrid_property
 
    def locked(self):
 
        # always should return [user_id, timelocked]
 
        if self._locked:
 
            _lock_info = self._locked.split(':')
 
            return int(_lock_info[0]), _lock_info[1]
 
        return [None, None]
 

	
 
    @locked.setter
 
    def locked(self, val):
 
        if val and isinstance(val, (list, tuple)):
 
            self._locked = ':'.join(map(str, val))
 
        else:
 
            self._locked = None
 

	
 
    @hybrid_property
 
    def changeset_cache(self):
 
        try:
 
            cs_cache = json.loads(self._changeset_cache) # might raise on bad data
 
            cs_cache['raw_id'] # verify data, raise exception on error
 
            return cs_cache
 
        except (TypeError, KeyError, ValueError):
 
            return EmptyChangeset().__json__()
 

	
 
    @changeset_cache.setter
 
    def changeset_cache(self, val):
 
        try:
 
            self._changeset_cache = json.dumps(val)
 
        except Exception:
 
            log.error(traceback.format_exc())
 

	
 
    @classmethod
 
    def url_sep(cls):
 
        return URL_SEP
 

	
 
    @classmethod
 
    def normalize_repo_name(cls, repo_name):
 
        """
 
        Normalizes os specific repo_name to the format internally stored inside
 
        database using URL_SEP
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        return cls.url_sep().join(repo_name.split(os.sep))
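    # Illustrative example (not part of the original source): on Windows, where
    # os.sep is '\\', an on-disk name is normalized to the URL_SEP form:
    #
    #     Repository.normalize_repo_name('group\\subgroup\\repo')  # -> 'group/subgroup/repo'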
 

	
 
    @classmethod
 
    def get_by_repo_name(cls, repo_name):
 
        q = Session().query(cls).filter(cls.repo_name == repo_name)
 
        q = q.options(joinedload(Repository.fork))\
 
                .options(joinedload(Repository.user))\
 
                .options(joinedload(Repository.group))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_full_path(cls, repo_full_path):
 
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
 
        repo_name = cls.normalize_repo_name(repo_name)
 
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
 

	
 
    @classmethod
 
    def get_repo_forks(cls, repo_id):
 
        return cls.query().filter(Repository.fork_id == repo_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns base path where all repos are stored
 

	
 
        :param cls:
 
        """
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == cls.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def forks(self):
 
        """
 
        Return forks of this repo
 
        """
 
        return Repository.get_repo_forks(self.repo_id)
 

	
 
    @property
 
    def parent(self):
 
        """
 
        Returns fork parent
 
        """
 
        return self.fork
 

	
 
    @property
 
    def just_name(self):
 
        return self.repo_name.split(Repository.url_sep())[-1]
 

	
 
    @property
 
    def groups_with_parents(self):
 
        groups = []
 
        if self.group is None:
 
            return groups
 

	
 
        cur_gr = self.group
 
        groups.insert(0, cur_gr)
 
        while 1:
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            groups.insert(0, gr)
 

	
 
        return groups
 

	
 
    @property
 
    def groups_and_repo(self):
 
        return self.groups_with_parents, self.just_name, self.repo_name
 

	
 
    @LazyProperty
 
    def repo_path(self):
 
        """
 
        Returns base full path for that repository means where it actually
 
        exists on a filesystem
 
        """
 
        q = Session().query(Ui).filter(Ui.ui_key ==
 
                                              Repository.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def repo_full_path(self):
 
        p = [self.repo_path]
 
        # we need to split the name by / since this is how we store the
 
        # names in the database, but that eventually needs to be converted
 
        # into a valid system path
 
        p += self.repo_name.split(Repository.url_sep())
 
        return os.path.join(*map(safe_unicode, p))
 

	
 
    @property
 
    def cache_keys(self):
 
        """
 
        Returns associated cache keys for that repo
 
        """
 
        return CacheInvalidation.query()\
 
            .filter(CacheInvalidation.cache_args == self.repo_name)\
 
            .order_by(CacheInvalidation.cache_key)\
 
            .all()
 

	
 
    def get_new_name(self, repo_name):
 
        """
 
        Returns new full repository name based on assigned group and new repo name
 

	
 
        :param repo_name:
 
        """
 
        path_prefix = self.group.full_path_splitted if self.group else []
 
        return Repository.url_sep().join(path_prefix + [repo_name])
 

	
 
    @property
 
    def _ui(self):
 
        """
 
        Creates an db based ui object for this repository
 
        """
 
        from kallithea.lib.utils import make_ui
 
        return make_ui('db', clear_session=False)
 

	
 
    @classmethod
 
    def is_valid(cls, repo_name):
 
        """
 
        returns True if given repo name is a valid filesystem repository
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        from kallithea.lib.utils import is_valid_repo
 

	
 
        return is_valid_repo(repo_name, cls.base_path())
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating repo api data
 

	
 
        """
 
        repo = self
 
        data = dict(
 
            repo_id=repo.repo_id,
 
            repo_name=repo.repo_name,
 
            repo_type=repo.repo_type,
 
            clone_uri=repo.clone_uri,
 
            private=repo.private,
 
            created_on=repo.created_on,
 
            description=repo.description,
 
            landing_rev=repo.landing_rev,
 
            owner=repo.user.username,
 
            fork_of=repo.fork.repo_name if repo.fork else None,
 
            enable_statistics=repo.enable_statistics,
 
            enable_locking=repo.enable_locking,
 
            enable_downloads=repo.enable_downloads,
 
            last_changeset=repo.changeset_cache,
 
            locked_by=User.get(self.locked[0]).get_api_data() \
 
                if self.locked[0] else None,
 
            locked_date=time_to_datetime(self.locked[1]) \
 
                if self.locked[1] else None
 
        )
 
        rc_config = Setting.get_app_settings()
 
        repository_fields = str2bool(rc_config.get('repository_fields'))
 
        if repository_fields:
 
            for f in self.extra_fields:
 
                data[f.field_key_prefixed] = f.field_value
 

	
 
        return data
 

	
 
    @classmethod
 
    def lock(cls, repo, user_id, lock_time=None):
 
        if lock_time is None:
 
            lock_time = time.time()
 
        repo.locked = [user_id, lock_time]
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def unlock(cls, repo):
 
        repo.locked = None
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def getlock(cls, repo):
 
        return repo.locked
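    # Illustrative usage sketch (not part of the original source): locking stores
    # '<user_id>:<timestamp>' in the locked column and commits immediately.
    #
    #     Repository.lock(repo, user.user_id)   # lock now on behalf of `user`
    #     Repository.getlock(repo)              # -> (user_id, '<timestamp>') while locked
    #     Repository.unlock(repo)               # clears the lock again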
 

	
 
    @property
 
    def last_db_change(self):
 
        return self.updated_on
 

	
 
    @property
 
    def clone_uri_hidden(self):
 
        clone_uri = self.clone_uri
 
        if clone_uri:
 
            import urlobject
 
            url_obj = urlobject.URLObject(self.clone_uri)
 
            if url_obj.password:
 
                clone_uri = url_obj.with_password('*****')
 
        return clone_uri
 

	
 
    def clone_url(self, **override):
 
        import kallithea.lib.helpers as h
 
        qualified_home_url = h.canonical_url('home')
 

	
 
        uri_tmpl = None
 
        if 'with_id' in override:
 
            uri_tmpl = self.DEFAULT_CLONE_URI_ID
 
            del override['with_id']
 

	
 
        if 'uri_tmpl' in override:
 
            uri_tmpl = override['uri_tmpl']
 
            del override['uri_tmpl']
 

	
 
        # template was not overridden via **override
 
        if not uri_tmpl:
 
            uri_tmpl = self.DEFAULT_CLONE_URI
 
            try:
 
                from pylons import tmpl_context as c
 
                uri_tmpl = c.clone_uri_tmpl
 
            except AttributeError:
 
                # in any case if we call this outside of request context,
 
                # ie, not having tmpl_context set up
 
                pass
 

	
 
        return get_clone_url(uri_tmpl=uri_tmpl,
 
                             qualified_home_url=qualified_home_url,
 
                             repo_name=self.repo_name,
 
                             repo_id=self.repo_id, **override)
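    # Illustrative examples (not part of the original source) of overriding the
    # clone URL template per call:
    #
    #     repo.clone_url()              # c.clone_uri_tmpl, or DEFAULT_CLONE_URI
    #     repo.clone_url(with_id=True)  # uses the '/_{repoid}' template
    #     repo.clone_url(uri_tmpl='{scheme}://{netloc}/{repo}')  # fully custom template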
 

	
 
    def set_state(self, state):
 
        self.repo_state = state
 
        Session().add(self)
 
    #==========================================================================
 
    # SCM PROPERTIES
 
    #==========================================================================
 

	
 
    def get_changeset(self, rev=None):
 
        return get_changeset_safe(self.scm_instance, rev)
 

	
 
    def get_landing_changeset(self):
 
        """
 
        Returns landing changeset, or if that doesn't exist returns the tip
 
        """
 
        _rev_type, _rev = self.landing_rev
 
        cs = self.get_changeset(_rev)
 
        if isinstance(cs, EmptyChangeset):
 
            return self.get_changeset()
 
        return cs
 

	
 
    def update_changeset_cache(self, cs_cache=None):
 
        """
 
        Update cache of last changeset for repository, keys should be::
 

	
 
            short_id
 
            raw_id
 
            revision
 
            message
 
            date
 
            author
 

	
 
        :param cs_cache:
 
        """
 
        from kallithea.lib.vcs.backends.base import BaseChangeset
 
        if cs_cache is None:
 
            cs_cache = EmptyChangeset()
 
            # use no-cache version here
 
            scm_repo = self.scm_instance_no_cache()
 
            if scm_repo:
 
                cs_cache = scm_repo.get_changeset()
 

	
 
        if isinstance(cs_cache, BaseChangeset):
 
            cs_cache = cs_cache.__json__()
 

	
 
        if (not self.changeset_cache or cs_cache['raw_id'] != self.changeset_cache['raw_id']):
 
            _default = datetime.datetime.fromtimestamp(0)
 
            last_change = cs_cache.get('date') or _default
 
            log.debug('updated repo %s with new cs cache %s',
 
                      self.repo_name, cs_cache)
 
            self.updated_on = last_change
 
            self.changeset_cache = cs_cache
 
            Session().add(self)
 
            Session().commit()
 
        else:
 
            log.debug('changeset_cache for %s already up to date with %s',
 
                      self.repo_name, cs_cache['raw_id'])
 

	
 
    @property
 
    def tip(self):
 
        return self.get_changeset('tip')
 

	
 
    @property
 
    def author(self):
 
        return self.tip.author
 

	
 
    @property
 
    def last_change(self):
 
        return self.scm_instance.last_change
 

	
 
    def get_comments(self, revisions=None):
 
        """
 
        Returns comments for this repository grouped by revisions
 

	
 
        :param revisions: filter query by revisions only
 
        """
 
        cmts = ChangesetComment.query()\
 
            .filter(ChangesetComment.repo == self)
 
        if revisions is not None:
 
            if not revisions:
 
                return [] # don't use sql 'in' on empty set
 
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
 
        grouped = collections.defaultdict(list)
 
        for cmt in cmts.all():
 
            grouped[cmt.revision].append(cmt)
 
        return grouped
 

	
 
    def statuses(self, revisions):
 
        """
 
        Returns statuses for this repository.
 
        PRs without any votes do _not_ show up as unreviewed.
 

	
 
        :param revisions: list of revisions to get statuses for
 
        """
 
        if not revisions:
 
            return {}
 

	
 
        statuses = ChangesetStatus.query()\
 
            .filter(ChangesetStatus.repo == self)\
 
            .filter(ChangesetStatus.version == 0)\
 
            .filter(ChangesetStatus.revision.in_(revisions))
 

	
 
        grouped = {}
 
        for stat in statuses.all():
 
            pr_id = pr_nice_id = pr_repo = None
 
            if stat.pull_request:
 
                pr_id = stat.pull_request.pull_request_id
 
                pr_nice_id = PullRequest.make_nice_id(pr_id)
 
                pr_repo = stat.pull_request.other_repo.repo_name
 
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
 
                                      pr_id, pr_repo, pr_nice_id]
 
        return grouped
 

	
 
    def _repo_size(self):
 
        from kallithea.lib import helpers as h
 
        log.debug('calculating repository size...')
 
        return h.format_byte_size(self.scm_instance.size)
 

	
 
    #==========================================================================
 
    # SCM CACHE INSTANCE
 
    #==========================================================================
 

	
 
    def set_invalidate(self):
 
        """
 
        Mark caches of this repo as invalid.
 
        """
 
        CacheInvalidation.set_invalidate(self.repo_name)
 

	
 
    def scm_instance_no_cache(self):
 
        return self.__get_instance()
 

	
 
    @property
 
    def scm_instance(self):
 
        import kallithea
 
        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
 
        if full_cache:
 
            return self.scm_instance_cached()
 
        return self.__get_instance()
 

	
 
    def scm_instance_cached(self, valid_cache_keys=None):
 
        @cache_region('long_term')
 
        def _c(repo_name):
 
            return self.__get_instance()
 
        rn = self.repo_name
 

	
 
        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
 
        if not valid:
 
            log.debug('Cache for %s invalidated, getting new object', rn)
 
            region_invalidate(_c, None, rn)
 
        else:
 
            log.debug('Getting scm_instance of %s from cache', rn)
 
        return _c(rn)
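    # Rough sketch of the interplay above (not part of the original source): the
    # beaker 'long_term' region memoizes the repo object per repo_name, while
    # CacheInvalidation rows act as the validity flag shared between processes.
    #
    #     repo.set_invalidate()        # mark cached SCM instances of this repo stale
    #     repo.scm_instance_cached()   # test_and_set_valid() is False, so the cached
    #                                  # entry is dropped and rebuilt via __get_instance()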
 

	
 
    def __get_instance(self):
 
        repo_full_path = self.repo_full_path
 

	
 
        alias = get_scm(repo_full_path)[0]
 
        log.debug('Creating instance of %s repository from %s',
 
                  alias, repo_full_path)
 
        backend = get_backend(alias)
 

	
 
        if alias == 'hg':
 
            repo = backend(safe_str(repo_full_path), create=False,
 
                           baseui=self._ui)
 
        else:
 
            repo = backend(repo_full_path, create=False)
 

	
 
        return repo
 

	
 
    def __json__(self):
 
        return dict(landing_rev = self.landing_rev)
 

	
 
class RepoGroup(Base, BaseModel):
 
    __tablename__ = 'groups'
 
    __table_args__ = (
 
        UniqueConstraint('group_name', 'group_parent_id'),
 
        CheckConstraint('group_id != group_parent_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    __mapper_args__ = {'order_by': 'group_name'}
 

	
 
    SEP = ' &raquo; '
 

	
 
    group_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    group_name = Column(String(255, convert_unicode=False), nullable=False, unique=True, default=None)
 
    group_parent_id = Column(Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
 
    group_description = Column(String(10000, convert_unicode=False), nullable=True, unique=None, default=None)
 
    enable_locking = Column(Boolean(), nullable=False, unique=None, default=False)
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
 
    created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
 
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
 
    parent_group = relationship('RepoGroup', remote_side=group_id)
 
    user = relationship('User')
 

	
 
    def __init__(self, group_name='', parent_group=None):
 
        self.group_name = group_name
 
        self.parent_group = parent_group
 

	
 
    def __unicode__(self):
 
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
 
                                      self.group_name)
 

	
 
    @classmethod
 
    def _generate_choice(cls, repo_group):
 
        """Return tuple with group_id and name as html literal"""
 
        from webhelpers.html import literal
 
        if repo_group is None:
 
            return (-1, u'-- %s --' % _('top level'))
 
        return repo_group.group_id, literal(cls.SEP.join(repo_group.full_path_splitted))
 

	
 
    @classmethod
 
    def groups_choices(cls, groups):
 
        """Return tuples with group_id and name as html literal."""
 
        return sorted((cls._generate_choice(g) for g in groups),
 
                      key=lambda c: c[1].split(cls.SEP))
 

	
 
    @classmethod
 
    def url_sep(cls):
 
        return URL_SEP
 

	
 
    @classmethod
 
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
 
        if case_insensitive:
 
            gr = cls.query()\
 
                .filter(cls.group_name.ilike(group_name))
 
        else:
 
            gr = cls.query()\
 
                .filter(cls.group_name == group_name)
 
        if cache:
 
            gr = gr.options(FromCache(
 
                            "sql_cache_short",
 
                            "get_group_%s" % _hash_key(group_name)
 
                            )
 
            )
 
        return gr.scalar()
 

	
 
    @property
 
    def parents(self):
 
        parents_recursion_limit = 10
 
        groups = []
 
        if self.parent_group is None:
 
            return groups
 
        cur_gr = self.parent_group
 
        groups.insert(0, cur_gr)
 
        cnt = 0
 
        while 1:
 
            cnt += 1
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            if cnt == parents_recursion_limit:
 
                # this will prevent accidental infinite loops
 
                log.error(('more than %s parents found for group %s, stopping '
 
                           'recursive parent fetching' % (parents_recursion_limit, self)))
 
                break
 

	
 
            groups.insert(0, gr)
 
        return groups
 

	
 
    @property
 
    def children(self):
 
        return RepoGroup.query().filter(RepoGroup.parent_group == self)
 

	
 
    @property
 
    def name(self):
 
        return self.group_name.split(RepoGroup.url_sep())[-1]
 

	
 
    @property
 
    def full_path(self):
 
        return self.group_name
 

	
 
    @property
 
    def full_path_splitted(self):
 
        return self.group_name.split(RepoGroup.url_sep())
 

	
 
    @property
 
    def repositories(self):
 
        return Repository.query()\
 
                .filter(Repository.group == self)\
 
                .order_by(Repository.repo_name)
 

	
 
    @property
 
    def repositories_recursive_count(self):
 
        cnt = self.repositories.count()
 

	
 
        def children_count(group):
 
            cnt = 0
 
            for child in group.children:
 
                cnt += child.repositories.count()
 
                cnt += children_count(child)
 
            return cnt
 

	
 
        return cnt + children_count(self)
 

	
 
    def _recursive_objects(self, include_repos=True):
 
        all_ = []
 

	
 
        def _get_members(root_gr):
 
            if include_repos:
 
                for r in root_gr.repositories:
 
                    all_.append(r)
 
            childs = root_gr.children.all()
 
            if childs:
 
                for gr in childs:
 
                    all_.append(gr)
 
                    _get_members(gr)
 

	
 
        _get_members(self)
 
        return [self] + all_
 

	
 
    def recursive_groups_and_repos(self):
 
        """
 
        Recursive return all groups, with repositories in those groups
 
        """
 
        return self._recursive_objects()
 

	
 
    def recursive_groups(self):
 
        """
 
        Returns all children groups for this group including children of children
 
        """
 
        return self._recursive_objects(include_repos=False)
 

	
 
    def get_new_name(self, group_name):
 
        """
 
        returns new full group name based on parent and new name
 

	
 
        :param group_name:
 
        """
 
        path_prefix = (self.parent_group.full_path_splitted if
 
                       self.parent_group else [])
 
        return RepoGroup.url_sep().join(path_prefix + [group_name])
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating api data
 

	
 
        """
 
        group = self
 
        data = dict(
 
            group_id=group.group_id,
 
            group_name=group.group_name,
 
            group_description=group.group_description,
 
            parent_group=group.parent_group.group_name if group.parent_group else None,
 
            repositories=[x.repo_name for x in group.repositories],
 
            owner=group.user.username
 
        )
 
        return data
 

	
 

	
 
class Permission(Base, BaseModel):
 
    __tablename__ = 'permissions'
 
    __table_args__ = (
 
        Index('p_perm_name_idx', 'permission_name'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    PERMS = [
 
        ('hg.admin', _('Kallithea Administrator')),
 

	
 
        ('repository.none', _('Default user has no access to new repositories')),
 
        ('repository.read', _('Default user has read access to new repositories')),
 
        ('repository.write', _('Default user has write access to new repositories')),
 
        ('repository.admin', _('Default user has admin access to new repositories')),
 

	
 
        ('group.none', _('Default user has no access to new repository groups')),
 
        ('group.read', _('Default user has read access to new repository groups')),
 
        ('group.write', _('Default user has write access to new repository groups')),
 
        ('group.admin', _('Default user has admin access to new repository groups')),
 

	
 
        ('usergroup.none', _('Default user has no access to new user groups')),
 
        ('usergroup.read', _('Default user has read access to new user groups')),
 
        ('usergroup.write', _('Default user has write access to new user groups')),
 
        ('usergroup.admin', _('Default user has admin access to new user groups')),
 

	
 
        ('hg.repogroup.create.false', _('Only admins can create repository groups')),
 
        ('hg.repogroup.create.true', _('Non-admins can create repository groups')),
 

	
 
        ('hg.usergroup.create.false', _('Only admins can create user groups')),
 
        ('hg.usergroup.create.true', _('Non-admins can create user groups')),
 

	
 
        ('hg.create.none', _('Only admins can create top level repositories')),
 
        ('hg.create.repository', _('Non-admins can create top level repositories')),
 

	
 
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
 
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
 

	
 
        ('hg.fork.none', _('Only admins can fork repositories')),
 
        ('hg.fork.repository', _('Non-admins can fork repositories')),
 

	
 
        ('hg.register.none', _('Registration disabled')),
 
        ('hg.register.manual_activate', _('User registration with manual account activation')),
 
        ('hg.register.auto_activate', _('User registration with automatic account activation')),
 

	
 
        ('hg.extern_activate.manual', _('Manual activation of external account')),
 
        ('hg.extern_activate.auto', _('Automatic activation of external account')),
 
    ]
 

	
 
    #definition of system default permissions for DEFAULT user
 
    DEFAULT_USER_PERMISSIONS = [
 
        'repository.read',
 
        'group.read',
 
        'usergroup.read',
 
        'hg.create.repository',
 
        'hg.create.write_on_repogroup.true',
 
        'hg.fork.repository',
 
        'hg.register.manual_activate',
 
        'hg.extern_activate.auto',
 
    ]
 

	
 
    # Weight defines which permissions are more important.
 
    # The higher the number, the more important the permission.
 
    PERM_WEIGHTS = {
 
        'repository.none': 0,
 
        'repository.read': 1,
 
        'repository.write': 3,
 
        'repository.admin': 4,
 

	
 
        'group.none': 0,
 
        'group.read': 1,
 
        'group.write': 3,
 
        'group.admin': 4,
 

	
 
        'usergroup.none': 0,
 
        'usergroup.read': 1,
 
        'usergroup.write': 3,
 
        'usergroup.admin': 4,
 

	
 
        'hg.repogroup.create.false': 0,
 
        'hg.repogroup.create.true': 1,
 

	
 
        'hg.usergroup.create.false': 0,
 
        'hg.usergroup.create.true': 1,
 

	
 
        'hg.fork.none': 0,
 
        'hg.fork.repository': 1,
 

	
 
        'hg.create.none': 0,
 
        'hg.create.repository': 1
 
    }
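
These weights let the most permissive of several granted permissions win; a small illustration (not part of the changed files, just a usage sketch):

    granted = ['repository.read', 'repository.write']
    strongest = max(granted, key=Permission.PERM_WEIGHTS.get)
    # strongest == 'repository.write'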
 

	
 
    permission_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    permission_name = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    permission_longname = Column(String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (
 
            self.__class__.__name__, self.permission_id, self.permission_name
 
        )
 

	
 
    @classmethod
 
    def get_by_key(cls, key):
 
        return cls.query().filter(cls.permission_name == key).scalar()
 

	
 
    @classmethod
 
    def get_default_perms(cls, default_user_id):
 
        q = Session().query(UserRepoToPerm, Repository, cls)\
 
         .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
 
         .join((cls, UserRepoToPerm.permission_id == cls.permission_id))\
 
         .filter(UserRepoToPerm.user_id == default_user_id)
 

	
 
        return q.all()
 

	
 
    @classmethod
 
    def get_default_group_perms(cls, default_user_id):
 
        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls)\
 
         .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
 
         .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id))\
 
         .filter(UserRepoGroupToPerm.user_id == default_user_id)
 

	
 
        return q.all()
 

	
 
    @classmethod
 
    def get_default_user_group_perms(cls, default_user_id):
 
        q = Session().query(UserUserGroupToPerm, UserGroup, cls)\
 
         .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
 
         .join((cls, UserUserGroupToPerm.permission_id == cls.permission_id))\
 
         .filter(UserUserGroupToPerm.user_id == default_user_id)
 

	
 
        return q.all()
 

	
 

	
 
class UserRepoToPerm(Base, BaseModel):
 
    __tablename__ = 'repo_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    repo_to_perm_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column(Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 
    repository_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User')
 
    repository = relationship('Repository')
 
    permission = relationship('Permission')
 

	
 
    @classmethod
 
    def create(cls, user, repository, permission):
 
        n = cls()
 
        n.user = user
 
        n.repository = repository
 
        n.permission = permission
 
        Session().add(n)
 
        return n
 

	
 
    def __unicode__(self):
 
        return u'<%s => %s >' % (self.user, self.repository)
 

	
 

	
 
class UserUserGroupToPerm(Base, BaseModel):
 
    __tablename__ = 'user_user_group_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    user_user_group_to_perm_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column(Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 
    user_group_id = Column(Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User')
 
    user_group = relationship('UserGroup')
 
    permission = relationship('Permission')
 

	
 
    @classmethod
 
    def create(cls, user, user_group, permission):
 
        n = cls()
 
        n.user = user
 
        n.user_group = user_group
 
        n.permission = permission
 
        Session().add(n)
 
        return n
 

	
 
    def __unicode__(self):
 
        return u'<%s => %s >' % (self.user, self.user_group)
 

	
 

	
 
class UserToPerm(Base, BaseModel):
 
    __tablename__ = 'user_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'permission_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    user_to_perm_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column(Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User')
 
    permission = relationship('Permission')
 

	
 
    def __unicode__(self):
 
        return u'<%s => %s >' % (self.user, self.permission)
 

	
 

	
 
class UserGroupRepoToPerm(Base, BaseModel):
 
    __tablename__ = 'users_group_repo_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    users_group_to_perm_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    users_group_id = Column(Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column(Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 
    repository_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
 

	
 
    users_group = relationship('UserGroup')
 
    permission = relationship('Permission')
 
    repository = relationship('Repository')
 

	
 
    @classmethod
 
    def create(cls, users_group, repository, permission):
 
        n = cls()
 
        n.users_group = users_group
 
        n.repository = repository
 
        n.permission = permission
 
        Session().add(n)
 
        return n
 

	
 
    def __unicode__(self):
 
        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
 

	
 

	
 
class UserGroupUserGroupToPerm(Base, BaseModel):
 
    __tablename__ = 'user_group_user_group_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
 
        CheckConstraint('target_user_group_id != user_group_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    user_group_user_group_to_perm_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    target_user_group_id = Column(Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column(Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 
    user_group_id = Column(Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 

	
 
    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
 
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
 
    permission = relationship('Permission')
 

	
 
    @classmethod
 
    def create(cls, target_user_group, user_group, permission):
 
        n = cls()
 
        n.target_user_group = target_user_group
 
        n.user_group = user_group
 
        n.permission = permission
 
        Session().add(n)
 
        return n
 

	
 
    def __unicode__(self):
 
        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
 

	
 

	
 
class UserGroupToPerm(Base, BaseModel):
 
    __tablename__ = 'users_group_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('users_group_id', 'permission_id',),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    users_group_to_perm_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    users_group_id = Column(Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column(Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 

	
 
    users_group = relationship('UserGroup')
 
    permission = relationship('Permission')
 

	
 

	
 
class UserRepoGroupToPerm(Base, BaseModel):
 
    __tablename__ = 'user_repo_group_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 

	
 
    group_to_perm_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    group_id = Column(Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column(Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User')
 
    group = relationship('RepoGroup')
 
    permission = relationship('Permission')
 

	
 
    @classmethod
 
    def create(cls, user, repository_group, permission):
 
        n = cls()
 
        n.user = user
 
        n.group = repository_group
 
        n.permission = permission
 
        Session().add(n)
 
        return n
 

	
 

	
 
class UserGroupRepoGroupToPerm(Base, BaseModel):
 
    __tablename__ = 'users_group_repo_group_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('users_group_id', 'group_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 

	
 
    users_group_repo_group_to_perm_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    users_group_id = Column(Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    group_id = Column(Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column(Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 

	
 
    users_group = relationship('UserGroup')
 
    permission = relationship('Permission')
 
    group = relationship('RepoGroup')
 

	
 
    @classmethod
 
    def create(cls, user_group, repository_group, permission):
 
        n = cls()
 
        n.users_group = user_group
 
        n.group = repository_group
 
        n.permission = permission
 
        Session().add(n)
 
        return n
 

	
 

	
 
class Statistics(Base, BaseModel):
 
    __tablename__ = 'statistics'
 
    __table_args__ = (
 
         UniqueConstraint('repository_id'),
 
         {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
          'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    stat_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    repository_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
 
    stat_on_revision = Column(Integer(), nullable=False)
 
    commit_activity = Column(LargeBinary(1000000), nullable=False)#JSON data
 
    commit_activity_combined = Column(LargeBinary(), nullable=False)#JSON data
 
    languages = Column(LargeBinary(1000000), nullable=False)#JSON data
 

	
 
    repository = relationship('Repository', single_parent=True)
 

	
 

	
 
class UserFollowing(Base, BaseModel):
 
    __tablename__ = 'user_followings'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'follows_repository_id'),
 
        UniqueConstraint('user_id', 'follows_user_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 

	
 
    user_following_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
 
    follows_user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    follows_from = Column(DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
 

	
 
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
 

	
 
    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
 
    follows_repository = relationship('Repository', order_by='Repository.repo_name')
 

	
 
    @classmethod
 
    def get_repo_followers(cls, repo_id):
 
        return cls.query().filter(cls.follows_repo_id == repo_id)
 

	
 

	
 
class CacheInvalidation(Base, BaseModel):
 
    __tablename__ = 'cache_invalidation'
 
    __table_args__ = (
 
        UniqueConstraint('cache_key'),
 
        Index('key_idx', 'cache_key'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    # cache_id, not used
 
    cache_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    # cache_key as created by _get_cache_key
 
    cache_key = Column(String(255, convert_unicode=False))
 
    # cache_args is a repo_name
 
    cache_args = Column(String(255, convert_unicode=False))
 
    # instance sets cache_active True when it is caching, other instances set
 
    # cache_active to False to indicate that this cache is invalid
 
    cache_active = Column(Boolean(), nullable=True, unique=None, default=False)
 

	
 
    def __init__(self, cache_key, repo_name=''):
 
        self.cache_key = cache_key
 
        self.cache_args = repo_name
 
        self.cache_active = False
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s[%s]')>" % (
 
            self.__class__.__name__,
 
            self.cache_id, self.cache_key, self.cache_active)
 

	
 
    def _cache_key_partition(self):
 
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
 
        return prefix, repo_name, suffix
 

	
 
    def get_prefix(self):
 
        """
 
        get prefix that might have been used in _get_cache_key to
 
        generate self.cache_key. Only used for informational purposes
 
        in repo_edit.html.
 
        """
 
        # prefix, repo_name, suffix
 
        return self._cache_key_partition()[0]
 

	
 
    def get_suffix(self):
 
        """
 
        get suffix that might have been used in _get_cache_key to
 
        generate self.cache_key. Only used for informational purposes
 
        in repo_edit.html.
 
        """
 
        # prefix, repo_name, suffix
 
        return self._cache_key_partition()[2]
 

	
 
    @classmethod
 
    def clear_cache(cls):
 
        """
 
        Delete all cache keys from database.
 
        Should only be run when all instances are down and all entries thus stale.
 
        """
 
        cls.query().delete()
 
        Session().commit()
 

	
 
    @classmethod
 
    def _get_cache_key(cls, key):
 
        """
 
        Wrapper for generating a unique cache key for this instance and "key".
 
        key must / will start with a repo_name which will be stored in .cache_args .
 
        """
 
        import kallithea
 
        prefix = kallithea.CONFIG.get('instance_id', '')
 
        return "%s%s" % (prefix, key)
 

	
 
    @classmethod
 
-    def set_invalidate(cls, repo_name, delete=False):
 
+    def set_invalidate(cls, repo_name):
 
        """
 
        Mark all caches of a repo as invalid in the database.
 
        """
 
        inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
 
        log.debug('for repo %s got %s invalidation objects',
 
                  safe_str(repo_name), inv_objs)
 

	
 
        for inv_obj in inv_objs:
 
            log.debug('marking %s key for invalidation based on repo_name=%s',
 
                      inv_obj, safe_str(repo_name))
 
-            if delete:
 
-                Session().delete(inv_obj)
 
-            else:
 
-                inv_obj.cache_active = False
 
-                Session().add(inv_obj)
 
+            Session().delete(inv_obj)
 
        Session().commit()
 

	
 
    @classmethod
 
    def test_and_set_valid(cls, repo_name, kind, valid_cache_keys=None):
 
        """
 
        Mark this cache key as active and currently cached.
 
        Return True if the existing cache registration was still valid.
 
        Return False to indicate that it had been invalidated and caches should be refreshed.
 
        """
 

	
 
        key = (repo_name + '_' + kind) if kind else repo_name
 
        cache_key = cls._get_cache_key(key)
 

	
 
        if valid_cache_keys and cache_key in valid_cache_keys:
 
            return True
 

	
 
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
 
        if not inv_obj:
 
            inv_obj = CacheInvalidation(cache_key, repo_name)
 
        if inv_obj.cache_active:
 
            return True
 
        inv_obj.cache_active = True
 
        Session().add(inv_obj)
 
        Session().commit()
 
        return False
 

	
 
    @classmethod
 
    def get_valid_cache_keys(cls):
 
        """
 
        Return opaque object with information of which caches still are valid
 
        and can be used without checking for invalidation.
 
        """
 
        return set(inv_obj.cache_key for inv_obj in cls.query().filter(cls.cache_active).all())
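
Taken together, the cache workflow this changeset simplifies looks roughly like this (a sketch, assuming an initialized Kallithea environment; 'myrepo' and 'README' are placeholder values):

    from kallithea.model.db import CacheInvalidation

    # a request serving cached data first registers/validates its key;
    # False means the record was gone (e.g. deleted by set_invalidate)
    # and the cached data must be rebuilt
    still_valid = CacheInvalidation.test_and_set_valid('myrepo', kind='README')
    if not still_valid:
        pass  # rebuild the cached data here; the key is now registered as active

    # after a push or other change, drop all live-cache records for the repo
    CacheInvalidation.set_invalidate('myrepo')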
 

	
 

	
 
class ChangesetComment(Base, BaseModel):
 
    __tablename__ = 'changeset_comments'
 
    __table_args__ = (
 
        Index('cc_revision_idx', 'revision'),
 
        Index('cc_pull_request_id_idx', 'pull_request_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    comment_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    repo_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    revision = Column(String(40))
 
    pull_request_id = Column(Integer(), ForeignKey('pull_requests.pull_request_id'))
 
    line_no = Column(Unicode(10))
 
    f_path = Column(Unicode(1000))
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
 
    text = Column(UnicodeText(25000), nullable=False)
 
    created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    modified_at = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    author = relationship('User')
 
    repo = relationship('Repository')
 
    # status_change is frequently used directly in templates - make it a lazy
 
    # join to avoid fetching each related ChangesetStatus on demand.
 
    # There will only be one ChangesetStatus referencing each comment so the join will not explode.
 
    status_change = relationship('ChangesetStatus',
 
                                 cascade="all, delete-orphan", lazy='joined')
 
    pull_request = relationship('PullRequest')
 

	
 
    @classmethod
 
    def get_users(cls, revision=None, pull_request_id=None):
 
        """
 
        Returns the users associated with this ChangesetComment, i.e. those
 
        who actually commented
 

	
 
        :param cls:
 
        :param revision:
 
        """
 
        q = Session().query(User)\
 
                .join(ChangesetComment.author)
 
        if revision is not None:
 
            q = q.filter(cls.revision == revision)
 
        elif pull_request_id is not None:
 
            q = q.filter(cls.pull_request_id == pull_request_id)
 
        return q.all()
 

	
 
    def url(self):
 
        anchor = "comment-%s" % self.comment_id
 
        import kallithea.lib.helpers as h
 
        if self.revision:
 
            return h.url('changeset_home', repo_name=self.repo.repo_name, revision=self.revision, anchor=anchor)
 
        elif self.pull_request_id is not None:
 
            return self.pull_request.url(anchor=anchor)
 

	
 
class ChangesetStatus(Base, BaseModel):
 
    __tablename__ = 'changeset_statuses'
 
    __table_args__ = (
 
        Index('cs_revision_idx', 'revision'),
 
        Index('cs_version_idx', 'version'),
 
        Index('cs_pull_request_id_idx', 'pull_request_id'),
 
        Index('cs_changeset_comment_id_idx', 'changeset_comment_id'),
 
        Index('cs_pull_request_id_user_id_version_idx', 'pull_request_id', 'user_id', 'version'),
 
        UniqueConstraint('repo_id', 'revision', 'version'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
 
    STATUS_APPROVED = 'approved'
 
    STATUS_REJECTED = 'rejected'
 
    STATUS_UNDER_REVIEW = 'under_review'
 

	
 
    STATUSES = [
 
        (STATUS_NOT_REVIEWED, _("Not reviewed")),  # (no icon) and default
 
        (STATUS_APPROVED, _("Approved")),
 
        (STATUS_REJECTED, _("Rejected")),
 
        (STATUS_UNDER_REVIEW, _("Under review")),
 
    ]
 

	
 
    changeset_status_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    repo_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
 
    revision = Column(String(40), nullable=False)
 
    status = Column(String(128), nullable=False, default=DEFAULT)
 
    changeset_comment_id = Column(Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
 
    modified_at = Column(DateTime(), nullable=False, default=datetime.datetime.now)
 
    version = Column(Integer(), nullable=False, default=0)
 
    pull_request_id = Column(Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
 

	
 
    author = relationship('User')
 
    repo = relationship('Repository')
 
    comment = relationship('ChangesetComment')
 
    pull_request = relationship('PullRequest')
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (
 
            self.__class__.__name__,
 
            self.status, self.author
 
        )
 

	
 
    @classmethod
 
    def get_status_lbl(cls, value):
 
        return dict(cls.STATUSES).get(value)
 

	
 
    @property
 
    def status_lbl(self):
 
        return ChangesetStatus.get_status_lbl(self.status)
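
The label lookup above simply maps a status value to its translated text; for example (illustrative only):

    ChangesetStatus.get_status_lbl(ChangesetStatus.STATUS_APPROVED)  # the translated "Approved" label
    ChangesetStatus.get_status_lbl('bogus')                          # None - unknown status value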
 

	
 

	
 
class PullRequest(Base, BaseModel):
 
    __tablename__ = 'pull_requests'
 
    __table_args__ = (
 
        Index('pr_org_repo_id_idx', 'org_repo_id'),
 
        Index('pr_other_repo_id_idx', 'other_repo_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    # values for .status
 
    STATUS_NEW = u'new'
 
    STATUS_CLOSED = u'closed'
 

	
 
    pull_request_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    title = Column(Unicode(255), nullable=True)
 
    description = Column(UnicodeText(10240))
 
    status = Column(Unicode(255), nullable=False, default=STATUS_NEW) # only for closedness, not approve/reject/etc
 
    created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    updated_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
 
    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
 
    org_repo_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    org_ref = Column(Unicode(255), nullable=False)
 
    other_repo_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    other_ref = Column(Unicode(255), nullable=False)
 

	
 
    @hybrid_property
 
    def revisions(self):
 
        return self._revisions.split(':')
 

	
 
    @revisions.setter
 
    def revisions(self, val):
 
        self._revisions = safe_unicode(':'.join(val))
 

	
 
    @property
 
    def org_ref_parts(self):
 
        return self.org_ref.split(':')
 

	
 
    @property
 
    def other_ref_parts(self):
 
        return self.other_ref.split(':')
 

	
 
    owner = relationship('User')
 
    reviewers = relationship('PullRequestReviewers',
 
                             cascade="all, delete-orphan")
 
    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
 
    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
 
    statuses = relationship('ChangesetStatus')
 
    comments = relationship('ChangesetComment',
 
                             cascade="all, delete-orphan")
 

	
 
    def is_closed(self):
 
        return self.status == self.STATUS_CLOSED
 

	
 
    def user_review_status(self, user_id):
 
        """Return the user's latest status votes on PR"""
 
        # note: no filtering on repo - that would be redundant
 
        status = ChangesetStatus.query()\
 
            .filter(ChangesetStatus.pull_request == self)\
 
            .filter(ChangesetStatus.user_id == user_id)\
 
            .order_by(ChangesetStatus.version)\
 
            .first()
 
        return str(status.status) if status else ''
 

	
 
    @classmethod
 
    def make_nice_id(cls, pull_request_id):
 
        '''Return pull request id nicely formatted for displaying'''
 
        return '#%s' % pull_request_id
 

	
 
    def nice_id(self):
 
        '''Return the id of this pull request, nicely formatted for displaying'''
 
        return self.make_nice_id(self.pull_request_id)
 

	
 
    def __json__(self):
 
        return dict(
 
            revisions=self.revisions
 
        )
 

	
 
    def url(self, **kwargs):
 
        canonical = kwargs.pop('canonical', None)
 
        import kallithea.lib.helpers as h
 
        b = self.org_ref_parts[1]
 
        if b != self.other_ref_parts[1]:
 
            s = '/_/' + b
 
        else:
 
            s = '/_/' + self.title
 
        kwargs['extra'] = urlreadable(s)
 
        if canonical:
 
            return h.canonical_url('pullrequest_show', repo_name=self.other_repo.repo_name,
 
                                   pull_request_id=self.pull_request_id, **kwargs)
 
        return h.url('pullrequest_show', repo_name=self.other_repo.repo_name,
 
                     pull_request_id=self.pull_request_id, **kwargs)
 

	
 
class PullRequestReviewers(Base, BaseModel):
 
    __tablename__ = 'pull_request_reviewers'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    def __init__(self, user=None, pull_request=None):
 
        self.user = user
 
        self.pull_request = pull_request
 

	
 
    pull_requests_reviewers_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    pull_request_id = Column(Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=True)
 

	
 
    user = relationship('User')
 
    pull_request = relationship('PullRequest')
 

	
 

	
 
class Notification(Base, BaseModel):
 
    __tablename__ = 'notifications'
 
    __table_args__ = (
 
        Index('notification_type_idx', 'type'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    TYPE_CHANGESET_COMMENT = u'cs_comment'
 
    TYPE_MESSAGE = u'message'
 
    TYPE_MENTION = u'mention'
 
    TYPE_REGISTRATION = u'registration'
 
    TYPE_PULL_REQUEST = u'pull_request'
 
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
 

	
 
    notification_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    subject = Column(Unicode(512), nullable=True)
 
    body = Column(UnicodeText(50000), nullable=True)
 
    created_by = Column(Integer(), ForeignKey('users.user_id'), nullable=True)
 
    created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    type_ = Column('type', Unicode(255))
 

	
 
    created_by_user = relationship('User')
 
    notifications_to_users = relationship('UserNotification', cascade="all, delete-orphan")
 

	
 
    @property
 
    def recipients(self):
 
        return [x.user for x in UserNotification.query()
 
                .filter(UserNotification.notification == self)
 
                .order_by(UserNotification.user_id.asc()).all()]
 

	
 
    @classmethod
 
    def create(cls, created_by, subject, body, recipients, type_=None):
 
        if type_ is None:
 
            type_ = Notification.TYPE_MESSAGE
 

	
 
        notification = cls()
 
        notification.created_by_user = created_by
 
        notification.subject = subject
 
        notification.body = body
 
        notification.type_ = type_
 
        notification.created_on = datetime.datetime.now()
 

	
 
        for u in recipients:
 
            assoc = UserNotification()
 
            assoc.notification = notification
 
            assoc.user_id = u.user_id
 
            Session().add(assoc)
 
        Session().add(notification)
 
        Session().flush() # assign notification.notification_id
 
        return notification
 

	
 
    @property
 
    def description(self):
 
        from kallithea.model.notification import NotificationModel
 
        return NotificationModel().make_description(self)
 

	
 

	
 
class UserNotification(Base, BaseModel):
 
    __tablename__ = 'user_to_notification'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'notification_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), primary_key=True)
 
    notification_id = Column(Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
 
    read = Column(Boolean, default=False)
 
    sent_on = Column(DateTime(timezone=False), nullable=True, unique=None)
 

	
 
    user = relationship('User')
 
    notification = relationship('Notification')
 

	
 
    def mark_as_read(self):
 
        self.read = True
 
        Session().add(self)
 

	
 

	
 
class Gist(Base, BaseModel):
 
    __tablename__ = 'gists'
 
    __table_args__ = (
 
        Index('g_gist_access_id_idx', 'gist_access_id'),
 
        Index('g_created_on_idx', 'created_on'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    GIST_PUBLIC = u'public'
 
    GIST_PRIVATE = u'private'
 
    DEFAULT_FILENAME = u'gistfile1.txt'
 

	
 
    gist_id = Column(Integer(), nullable=False, unique=True, primary_key=True)
 
    gist_access_id = Column(Unicode(250))
 
    gist_description = Column(UnicodeText(1024))
 
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
 
    gist_expires = Column(Float(53), nullable=False)
 
    gist_type = Column(Unicode(128), nullable=False)
 
    created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    modified_at = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    owner = relationship('User')
 

	
 
    def __repr__(self):
 
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
 

	
 
    @classmethod
 
    def get_or_404(cls, id_):
 
        res = cls.query().filter(cls.gist_access_id == id_).scalar()
 
        if res is None:
 
            raise HTTPNotFound
 
        return res
 

	
 
    @classmethod
 
    def get_by_access_id(cls, gist_access_id):
 
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
 

	
 
    def gist_url(self):
 
        import kallithea
 
        alias_url = kallithea.CONFIG.get('gist_alias_url')
 
        if alias_url:
 
            return alias_url.replace('{gistid}', self.gist_access_id)
 

	
 
        import kallithea.lib.helpers as h
 
        return h.canonical_url('gist', gist_id=self.gist_access_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns base path where all gists are stored
 

	
 
        :param cls:
 
        """
 
        from kallithea.model.gist import GIST_STORE_LOC
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == URL_SEP)
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating gist related data for API
 
        """
 
        gist = self
 
        data = dict(
 
            gist_id=gist.gist_id,
 
            type=gist.gist_type,
 
            access_id=gist.gist_access_id,
 
            description=gist.gist_description,
 
            url=gist.gist_url(),
 
            expires=gist.gist_expires,
 
            created_on=gist.created_on,
 
        )
 
        return data
 

	
 
    def __json__(self):
 
        data = dict(
 
        )
 
        data.update(self.get_api_data())
 
        return data
 
    ## SCM functions
 

	
 
    @property
 
    def scm_instance(self):
 
        from kallithea.lib.vcs import get_repo
 
        base_path = self.base_path()
 
        return get_repo(os.path.join(*map(safe_str,
 
                                          [base_path, self.gist_access_id])))
 

	
 

	
 
class DbMigrateVersion(Base, BaseModel):
 
    __tablename__ = 'db_migrate_version'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    repository_id = Column(String(250), nullable=False, unique=True, primary_key=True)
 
    repository_path = Column(Text)
 
    version = Column(Integer)
kallithea/model/scm.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.scm
 
~~~~~~~~~~~~~~~~~~~
 

	
 
Scm model for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 9, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import os
 
import re
 
import time
 
import traceback
 
import logging
 
import cStringIO
 
import pkg_resources
 
from os.path import join as jn
 

	
 
from sqlalchemy import func
 
from pylons.i18n.translation import _
 

	
 
import kallithea
 
from kallithea.lib.vcs import get_backend
 
from kallithea.lib.vcs.exceptions import RepositoryError
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 

	
 
from kallithea import BACKENDS
 
from kallithea.lib import helpers as h
 
from kallithea.lib.utils2 import safe_str, safe_unicode, get_server_url,\
 
    _set_extras
 
from kallithea.lib.auth import HasRepoPermissionAny, HasRepoGroupPermissionAny,\
 
    HasUserGroupPermissionAny, HasPermissionAny, HasPermissionAll
 
from kallithea.lib.utils import get_filesystem_repos, make_ui, \
 
    action_logger
 
from kallithea.model import BaseModel
 
from kallithea.model.db import Repository, Ui, CacheInvalidation, \
 
    UserFollowing, UserLog, User, RepoGroup, PullRequest
 
from kallithea.lib.hooks import log_push_action
 
from kallithea.lib.exceptions import NonRelativePathError, IMCCommitError
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class UserTemp(object):
 
    def __init__(self, user_id):
 
        self.user_id = user_id
 

	
 
    def __repr__(self):
 
        return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
 

	
 

	
 
class RepoTemp(object):
 
    def __init__(self, repo_id):
 
        self.repo_id = repo_id
 

	
 
    def __repr__(self):
 
        return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
 

	
 

	
 
class CachedRepoList(object):
 
    """
 
    Cached repo list. Uses super-fast in-memory cache after initialization.
 
    """
 

	
 
    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
 
        self.db_repo_list = db_repo_list
 
        self.repos_path = repos_path
 
        self.order_by = order_by
 
        self.reversed = (order_by or '').startswith('-')
 
        if not perm_set:
 
            perm_set = ['repository.read', 'repository.write',
 
                        'repository.admin']
 
        self.perm_set = perm_set
 

	
 
    def __len__(self):
 
        return len(self.db_repo_list)
 

	
 
    def __repr__(self):
 
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
 

	
 
    def __iter__(self):
 
        # pre-propagated valid_cache_keys to save executing select statements
 
        # for each repo
 
        valid_cache_keys = CacheInvalidation.get_valid_cache_keys()
 

	
 
        for dbr in self.db_repo_list:
 
            scmr = dbr.scm_instance_cached(valid_cache_keys)
 
            # check permission at this level
 
            if not HasRepoPermissionAny(
 
                *self.perm_set)(dbr.repo_name, 'get repo check'):
 
                continue
 

	
 
            try:
 
                last_change = scmr.last_change
 
                tip = h.get_changeset_safe(scmr, 'tip')
 
            except Exception:
 
                log.error(
 
                    '%s this repository is present in database but it '
 
                    'cannot be created as an scm instance, org_exc:%s'
 
                    % (dbr.repo_name, traceback.format_exc())
 
                )
 
                continue
 

	
 
            tmp_d = {}
 
            tmp_d['name'] = dbr.repo_name
 
            tmp_d['name_sort'] = tmp_d['name'].lower()
 
            tmp_d['raw_name'] = tmp_d['name'].lower()
 
            tmp_d['description'] = dbr.description
 
            tmp_d['description_sort'] = tmp_d['description'].lower()
 
            tmp_d['last_change'] = last_change
 
            tmp_d['last_change_sort'] = time.mktime(last_change.timetuple())
 
            tmp_d['tip'] = tip.raw_id
 
            tmp_d['tip_sort'] = tip.revision
 
            tmp_d['rev'] = tip.revision
 
            tmp_d['contact'] = dbr.user.full_contact
 
            tmp_d['contact_sort'] = tmp_d['contact']
 
            tmp_d['owner_sort'] = tmp_d['contact']
 
            tmp_d['repo_archives'] = list(scmr._get_archives())
 
            tmp_d['last_msg'] = tip.message
 
            tmp_d['author'] = tip.author
 
            tmp_d['dbrepo'] = dbr.get_dict()
 
            tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
 
            yield tmp_d
 

	
 

	
 
class SimpleCachedRepoList(CachedRepoList):
 
    """
 
    Lighter version of CachedRepoList without the scm initialisation
 
    """
 

	
 
    def __iter__(self):
 
        for dbr in self.db_repo_list:
 
            # check permission at this level
 
            if not HasRepoPermissionAny(
 
                *self.perm_set)(dbr.repo_name, 'get repo check'):
 
                continue
 

	
 
            tmp_d = {
 
                'name': dbr.repo_name,
 
                'dbrepo': dbr.get_dict(),
 
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
 
            }
 
            yield tmp_d
 

	
 

	
 
class _PermCheckIterator(object):
 
    def __init__(self, obj_list, obj_attr, perm_set, perm_checker, extra_kwargs=None):
 
        """
 
        Creates an iterator from the given list of objects, additionally
 
        checking each of them against the permissions in perm_set
 

	
 
        :param obj_list: list of db objects
 
        :param obj_attr: attribute of object to pass into perm_checker
 
        :param perm_set: list of permissions to check
 
        :param perm_checker: callable to check permissions against
 
        """
 
        self.obj_list = obj_list
 
        self.obj_attr = obj_attr
 
        self.perm_set = perm_set
 
        self.perm_checker = perm_checker
 
        self.extra_kwargs = extra_kwargs or {}
 

	
 
    def __len__(self):
 
        return len(self.obj_list)
 

	
 
    def __repr__(self):
 
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
 

	
 
    def __iter__(self):
 
        for db_obj in self.obj_list:
 
            # check permission at this level
 
            name = getattr(db_obj, self.obj_attr, None)
 
            if not self.perm_checker(*self.perm_set)(
 
                    name, self.__class__.__name__, **self.extra_kwargs):
 
                continue
 

	
 
            yield db_obj
 

	
 

	
 
class RepoList(_PermCheckIterator):
 

	
 
    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
 
        if not perm_set:
 
            perm_set = ['repository.read', 'repository.write', 'repository.admin']
 

	
 
        super(RepoList, self).__init__(obj_list=db_repo_list,
 
                    obj_attr='repo_name', perm_set=perm_set,
 
                    perm_checker=HasRepoPermissionAny,
 
                    extra_kwargs=extra_kwargs)
 

	
 

	
 
class RepoGroupList(_PermCheckIterator):
 

	
 
    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
 
        if not perm_set:
 
            perm_set = ['group.read', 'group.write', 'group.admin']
 

	
 
        super(RepoGroupList, self).__init__(obj_list=db_repo_group_list,
 
                    obj_attr='group_name', perm_set=perm_set,
 
                    perm_checker=HasRepoGroupPermissionAny,
 
                    extra_kwargs=extra_kwargs)
 

	
 

	
 
class UserGroupList(_PermCheckIterator):
 

	
 
    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
 
        if not perm_set:
 
            perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
 

	
 
        super(UserGroupList, self).__init__(obj_list=db_user_group_list,
 
                    obj_attr='users_group_name', perm_set=perm_set,
 
                    perm_checker=HasUserGroupPermissionAny,
 
                    extra_kwargs=extra_kwargs)
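
A brief sketch of how these permission-filtering iterators are used inside a request, where the Has*PermissionAny checkers can see the authenticated user (illustrative only; assumes an initialized database session):

    from kallithea.model.db import Repository
    from kallithea.model.scm import RepoList

    all_repos = Repository.query().all()
    # iterating yields only repositories the current user can read, write or administer
    readable = list(RepoList(all_repos))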
 

	
 

	
 
class ScmModel(BaseModel):
 
    """
 
    Generic Scm Model
 
    """
 

	
 
    def __get_repo(self, instance):
 
        cls = Repository
 
        if isinstance(instance, cls):
 
            return instance
 
        elif isinstance(instance, int) or safe_str(instance).isdigit():
 
            return cls.get(instance)
 
        elif isinstance(instance, basestring):
 
            return cls.get_by_repo_name(instance)
 
        elif instance is not None:
 
            raise Exception('given object must be int, basestr or Instance'
 
                            ' of %s got %s' % (type(cls), type(instance)))
 

	
 
    @LazyProperty
 
    def repos_path(self):
 
        """
 
        Gets the repositories root path from database
 
        """
 

	
 
        q = self.sa.query(Ui).filter(Ui.ui_key == '/').one()
 

	
 
        return q.ui_value
 

	
 
    def repo_scan(self, repos_path=None):
 
        """
 
        Listing of repositories in given path. This path should not be a
 
        repository itself. Return a dictionary of repository objects
 

	
 
        :param repos_path: path to directory containing repositories
 
        """
 

	
 
        if repos_path is None:
 
            repos_path = self.repos_path
 

	
 
        log.info('scanning for repositories in %s', repos_path)
 

	
 
        baseui = make_ui('db')
 
        repos = {}
 

	
 
        for name, path in get_filesystem_repos(repos_path, recursive=True):
 
            # names need to be decomposed and put back together using the /
 
            # since this is the internal storage separator for kallithea
 
            name = Repository.normalize_repo_name(name)
 

	
 
            try:
 
                if name in repos:
 
                    raise RepositoryError('Duplicate repository name %s '
 
                                          'found in %s' % (name, path))
 
                else:
 

	
 
                    klass = get_backend(path[0])
 

	
 
                    if path[0] == 'hg' and path[0] in BACKENDS.keys():
 
                        repos[name] = klass(safe_str(path[1]), baseui=baseui)
 

	
 
                    if path[0] == 'git' and path[0] in BACKENDS.keys():
 
                        repos[name] = klass(path[1])
 
            except OSError:
 
                continue
 
        log.debug('found %s paths with repositories', len(repos))
 
        return repos
 

	
 
    def get_repos(self, all_repos=None, sort_key=None, simple=False):
 
        """
 
        Get all repos from db and for each repo create its
 
        backend instance and fill that backend with information from the database
 

	
 
        :param all_repos: list of repository names as strings
 
            give specific repositories list, good for filtering
 

	
 
        :param sort_key: initial sorting of repos
 
        :param simple: use SimpleCachedList - one without the SCM info
 
        """
 
        if all_repos is None:
 
            all_repos = self.sa.query(Repository)\
 
                        .filter(Repository.group_id == None)\
 
                        .order_by(func.lower(Repository.repo_name)).all()
 
        if simple:
 
            repo_iter = SimpleCachedRepoList(all_repos,
 
                                             repos_path=self.repos_path,
 
                                             order_by=sort_key)
 
        else:
 
            repo_iter = CachedRepoList(all_repos,
 
                                       repos_path=self.repos_path,
 
                                       order_by=sort_key)
 

	
 
        return repo_iter
 

	
 
    def get_repo_groups(self, all_groups=None):
 
        if all_groups is None:
 
            all_groups = RepoGroup.query()\
 
                .filter(RepoGroup.group_parent_id == None).all()
 
        return [x for x in RepoGroupList(all_groups)]
 

	
 
-    def mark_for_invalidation(self, repo_name, delete=False):
 
+    def mark_for_invalidation(self, repo_name):
 
        """
 
        Mark caches of this repo invalid in the database.
 

	
 
        :param repo_name: the repo for which caches should be marked invalid
 
        """
 
-        CacheInvalidation.set_invalidate(repo_name, delete=delete)
 
+        CacheInvalidation.set_invalidate(repo_name)
 
        repo = Repository.get_by_repo_name(repo_name)
 
        if repo is not None:
 
            repo.update_changeset_cache()
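
With the model above, callers only need a single call after changing repository content (sketch; 'myrepo' is a placeholder name):

    from kallithea.model.scm import ScmModel

    ScmModel().mark_for_invalidation('myrepo')  # deletes the repo's live-cache records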
 

	
 
    def toggle_following_repo(self, follow_repo_id, user_id):
 

	
 
        f = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        if f is not None:
 
            try:
 
                self.sa.delete(f)
 
                action_logger(UserTemp(user_id),
 
                              'stopped_following_repo',
 
                              RepoTemp(follow_repo_id))
 
                return
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                raise
 

	
 
        try:
 
            f = UserFollowing()
 
            f.user_id = user_id
 
            f.follows_repo_id = follow_repo_id
 
            self.sa.add(f)
 

	
 
            action_logger(UserTemp(user_id),
 
                          'started_following_repo',
 
                          RepoTemp(follow_repo_id))
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def toggle_following_user(self, follow_user_id, user_id):
 
        f = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.follows_user_id == follow_user_id)\
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        if f is not None:
 
            try:
 
                self.sa.delete(f)
 
                return
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                raise
 

	
 
        try:
 
            f = UserFollowing()
 
            f.user_id = user_id
 
            f.follows_user_id = follow_user_id
 
            self.sa.add(f)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def is_following_repo(self, repo_name, user_id, cache=False):
 
        r = self.sa.query(Repository)\
 
            .filter(Repository.repo_name == repo_name).scalar()
 

	
 
        f = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.follows_repository == r)\
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        return f is not None
 

	
 
    def is_following_user(self, username, user_id, cache=False):
 
        u = User.get_by_username(username)
 

	
 
        f = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.follows_user == u)\
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        return f is not None
 

	
 
    def get_followers(self, repo):
 
        repo = self._get_repo(repo)
 

	
 
        return self.sa.query(UserFollowing)\
 
                .filter(UserFollowing.follows_repository == repo).count()
 

	
 
    def get_forks(self, repo):
 
        repo = self._get_repo(repo)
 
        return self.sa.query(Repository)\
 
                .filter(Repository.fork == repo).count()
 

	
 
    def get_pull_requests(self, repo):
 
        repo = self._get_repo(repo)
 
        return self.sa.query(PullRequest)\
 
                .filter(PullRequest.other_repo == repo)\
 
                .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
 

	
 
    def mark_as_fork(self, repo, fork, user):
 
        repo = self.__get_repo(repo)
 
        fork = self.__get_repo(fork)
 
        if fork and repo.repo_id == fork.repo_id:
 
            raise Exception("Cannot set repository as fork of itself")
 

	
 
        if fork and repo.repo_type != fork.repo_type:
 
            raise RepositoryError("Cannot set repository as fork of repository with other type")
 

	
 
        repo.fork = fork
 
        self.sa.add(repo)
 
        return repo
 

	
 
    def _handle_rc_scm_extras(self, username, repo_name, repo_alias,
 
                              action=None):
 
        from kallithea import CONFIG
 
        from kallithea.lib.base import _get_ip_addr
 
        try:
 
            from pylons import request
 
            environ = request.environ
 
        except TypeError:
 
            # we might use this outside of request context, let's fake the
 
            # environ data
 
            from webob import Request
 
            environ = Request.blank('').environ
 
        extras = {
 
            'ip': _get_ip_addr(environ),
 
            'username': username,
 
            'action': action or 'push_local',
 
            'repository': repo_name,
 
            'scm': repo_alias,
 
            'config': CONFIG['__file__'],
 
            'server_url': get_server_url(environ),
 
            'make_lock': None,
 
            'locked_by': [None, None]
 
        }
 
        _set_extras(extras)
 

	
 
    def _handle_push(self, repo, username, action, repo_name, revisions):
 
        """
 
        Triggers push action hooks
 

	
 
        :param repo: SCM repo
 
        :param username: username who pushes
 
        :param action: push/push_local/push_remote
 
        :param repo_name: name of repo
 
        :param revisions: list of revisions that we pushed
 
        """
 
        self._handle_rc_scm_extras(username, repo_name, repo_alias=repo.alias)
 
        _scm_repo = repo._repo
 
        # trigger push hook
 
        if repo.alias == 'hg':
 
            log_push_action(_scm_repo.ui, _scm_repo, node=revisions[0])
 
        elif repo.alias == 'git':
 
            log_push_action(None, _scm_repo, _git_revs=revisions)
 

	
 
    def _get_IMC_module(self, scm_type):
 
        """
 
        Returns InMemoryCommit class based on scm_type
 

	
 
        :param scm_type:
 
        """
 
        if scm_type == 'hg':
 
            from kallithea.lib.vcs.backends.hg import MercurialInMemoryChangeset
 
            return MercurialInMemoryChangeset
 

	
 
        if scm_type == 'git':
 
            from kallithea.lib.vcs.backends.git import GitInMemoryChangeset
 
            return GitInMemoryChangeset
 

	
 
        raise Exception('Invalid scm_type, must be one of hg, git; got %s'

                        % (scm_type,))
 

	
 
    def pull_changes(self, repo, username):
 
        """
 
        Pull from "clone URL".
 
        """
 
        dbrepo = self.__get_repo(repo)
 
        clone_uri = dbrepo.clone_uri
 
        if not clone_uri:
 
            raise Exception("This repository doesn't have a clone uri")
 

	
 
        repo = dbrepo.scm_instance
 
        repo_name = dbrepo.repo_name
 
        try:
 
            if repo.alias == 'git':
 
                repo.fetch(clone_uri)
 
                # git doesn't really have something like post-fetch action
 
                # we fake that now. #TODO: extract fetched revisions somehow
 
                # here
 
                self._handle_push(repo,
 
                                  username=username,
 
                                  action='push_remote',
 
                                  repo_name=repo_name,
 
                                  revisions=[])
 
            else:
 
                self._handle_rc_scm_extras(username, dbrepo.repo_name,
 
                                           repo.alias, action='push_remote')
 
                repo.pull(clone_uri)
 

	
 
            self.mark_for_invalidation(repo_name)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def commit_change(self, repo, repo_name, cs, user, author, message,
 
                      content, f_path):
 
        """
 
        Commit a change to a single file
 

	
 
        :param repo: a db_repo.scm_instance
 
        """
 
        user = self._get_user(user)
 
        IMC = self._get_IMC_module(repo.alias)
 

	
 
        # decoding here ensures we have properly encoded values;

        # otherwise this will throw an exception and deny the commit
 
        content = safe_str(content)
 
        path = safe_str(f_path)
 
        # message and author need to be unicode
 
        # proper backend should then translate that into required type
 
        message = safe_unicode(message)
 
        author = safe_unicode(author)
 
        imc = IMC(repo)
 
        imc.change(FileNode(path, content, mode=cs.get_file_mode(f_path)))
 
        try:
 
            tip = imc.commit(message=message, author=author,
 
                             parents=[cs], branch=cs.branch)
 
        except Exception as e:
 
            log.error(traceback.format_exc())
 
            raise IMCCommitError(str(e))
 
        finally:
 
            # always clear caches; even if the commit fails we want a fresh object
 
            self.mark_for_invalidation(repo_name)
 
        self._handle_push(repo,
 
                          username=user.username,
 
                          action='push_local',
 
                          repo_name=repo_name,
 
                          revisions=[tip.raw_id])
 
        return tip
 

	
 
    def _sanitize_path(self, f_path):
 
        if f_path.startswith('/') or f_path.startswith('.') or '../' in f_path:
 
            raise NonRelativePathError('%s is not a relative path' % f_path)
 
        if f_path:
 
            f_path = os.path.normpath(f_path)
 
        return f_path
 

	
 
    def get_nodes(self, repo_name, revision, root_path='/', flat=True):
 
        """
 
        Recursively walk the root dir and return the directories and files found.
 

	
 
        :param repo_name: name of repository
 
        :param revision: revision for which to list nodes
 
        :param root_path: root path to list
 
        :param flat: if True return plain path strings, if False return dicts with name and type
 

	
 
        """
 
        _files = list()
 
        _dirs = list()
 
        try:
 
            _repo = self.__get_repo(repo_name)
 
            changeset = _repo.scm_instance.get_changeset(revision)
 
            root_path = root_path.lstrip('/')
 
            for topnode, dirs, files in changeset.walk(root_path):
 
                for f in files:
 
                    _files.append(f.path if flat else {"name": f.path,
 
                                                       "type": "file"})
 
                for d in dirs:
 
                    _dirs.append(d.path if flat else {"name": d.path,
 
                                                      "type": "dir"})
 
        except RepositoryError:
 
            log.debug(traceback.format_exc())
 
            raise
 

	
 
        return _dirs, _files
 

	
 
    def create_nodes(self, user, repo, message, nodes, parent_cs=None,
 
                     author=None, trigger_push_hook=True):
 
        """
 
        Commits specified nodes to repo.
 

	
 
        :param user: Kallithea User object or user_id, the committer
 
        :param repo: Kallithea Repository object
 
        :param message: commit message
 
        :param nodes: mapping {filename:{'content':content},...}
 
        :param parent_cs: parent changeset; if empty, this is the initial commit
 
        :param author: author of the commit; may differ from the committer (git only)
 
        :param trigger_push_hook: trigger push hooks
 

	
 
        :returns: new committed changeset
 
        """
 

	
 
        user = self._get_user(user)
 
        scm_instance = repo.scm_instance_no_cache()
 

	
 
        processed_nodes = []
 
        for f_path in nodes:
 
            f_path = self._sanitize_path(f_path)
 
            content = nodes[f_path]['content']
 
            f_path = safe_str(f_path)
 
            # decoding here ensures we have properly encoded values;

            # otherwise this will throw an exception and deny the commit
 
            if isinstance(content, (basestring,)):
 
                content = safe_str(content)
 
            elif isinstance(content, (file, cStringIO.OutputType,)):
 
                content = content.read()
 
            else:
 
                raise Exception('Content is of unrecognized type %s' % (
 
                    type(content)
 
                ))
 
            processed_nodes.append((f_path, content))
 

	
 
        message = safe_unicode(message)
 
        committer = user.full_contact
 
        author = safe_unicode(author) if author else committer
 

	
 
        IMC = self._get_IMC_module(scm_instance.alias)
 
        imc = IMC(scm_instance)
 

	
 
        if not parent_cs:
 
            parent_cs = EmptyChangeset(alias=scm_instance.alias)
 

	
 
        if isinstance(parent_cs, EmptyChangeset):
 
            # EmptyChangeset means we're editing an empty repository
 
            parents = None
 
        else:
 
            parents = [parent_cs]
 
        # add multiple nodes
 
        for path, content in processed_nodes:
 
            imc.add(FileNode(path, content=content))
 

	
 
        tip = imc.commit(message=message,
 
                         author=author,
 
                         parents=parents,
 
                         branch=parent_cs.branch)
 

	
 
        self.mark_for_invalidation(repo.repo_name)
 
        if trigger_push_hook:
 
            self._handle_push(scm_instance,
 
                              username=user.username,
 
                              action='push_local',
 
                              repo_name=repo.repo_name,
 
                              revisions=[tip.raw_id])
 
        return tip
 

	
 
    def update_nodes(self, user, repo, message, nodes, parent_cs=None,
 
                     author=None, trigger_push_hook=True):
 
        """
 
        Commits specified nodes to repo. Again.
 
        """
 
        user = self._get_user(user)
 
        scm_instance = repo.scm_instance_no_cache()
 

	
 
        message = safe_unicode(message)
 
        committer = user.full_contact
 
        author = safe_unicode(author) if author else committer
 

	
 
        imc_class = self._get_IMC_module(scm_instance.alias)
 
        imc = imc_class(scm_instance)
 

	
 
        if not parent_cs:
 
            parent_cs = EmptyChangeset(alias=scm_instance.alias)
 

	
 
        if isinstance(parent_cs, EmptyChangeset):
 
            # EmptyChangeset means we're editing an empty repository
 
            parents = None
 
        else:
 
            parents = [parent_cs]
 

	
 
        # add multiple nodes
 
        for _filename, data in nodes.items():
 
            # new filename, can be renamed from the old one
 
            filename = self._sanitize_path(data['filename'])
 
            old_filename = self._sanitize_path(_filename)
 
            content = data['content']
 

	
 
            filenode = FileNode(old_filename, content=content)
 
            op = data['op']
 
            if op == 'add':
 
                imc.add(filenode)
 
            elif op == 'del':
 
                imc.remove(filenode)
 
            elif op == 'mod':
 
                if filename != old_filename:
 
                    #TODO: handle renames, needs vcs lib changes
 
                    imc.remove(filenode)
 
                    imc.add(FileNode(filename, content=content))
 
                else:
 
                    imc.change(filenode)
 

	
 
        # commit changes
 
        tip = imc.commit(message=message,
 
                         author=author,
 
                         parents=parents,
 
                         branch=parent_cs.branch)
 

	
 
        self.mark_for_invalidation(repo.repo_name)
 
        if trigger_push_hook:
 
            self._handle_push(scm_instance,
 
                              username=user.username,
 
                              action='push_local',
 
                              repo_name=repo.repo_name,
 
                              revisions=[tip.raw_id])
 

	
 
    def delete_nodes(self, user, repo, message, nodes, parent_cs=None,
 
                     author=None, trigger_push_hook=True):
 
        """
 
        Deletes specified nodes from repo.
 

	
 
        :param user: Kallithea User object or user_id, the committer
 
        :param repo: Kallithea Repository object
 
        :param message: commit message
 
        :param nodes: mapping {filename:{'content':content},...}
 
        :param parent_cs: parent changeset; if empty, this is the initial commit
 
        :param author: author of the commit; may differ from the committer (git only)
 
        :param trigger_push_hook: trigger push hooks
 

	
 
        :returns: new committed changeset after deletion
 
        """
 

	
 
        user = self._get_user(user)
 
        scm_instance = repo.scm_instance_no_cache()
 

	
 
        processed_nodes = []
 
        for f_path in nodes:
 
            f_path = self._sanitize_path(f_path)
 
            # content can be empty, but for compatibility it allows the same

            # dict structure as add_nodes
 
            content = nodes[f_path].get('content')
 
            processed_nodes.append((f_path, content))
 

	
 
        message = safe_unicode(message)
 
        committer = user.full_contact
 
        author = safe_unicode(author) if author else committer
 

	
 
        IMC = self._get_IMC_module(scm_instance.alias)
 
        imc = IMC(scm_instance)
 

	
 
        if not parent_cs:
 
            parent_cs = EmptyChangeset(alias=scm_instance.alias)
 

	
 
        if isinstance(parent_cs, EmptyChangeset):
 
            # EmptyChangeset means we're editing an empty repository
 
            parents = None
 
        else:
 
            parents = [parent_cs]
 
        # add multiple nodes
 
        for path, content in processed_nodes:
 
            imc.remove(FileNode(path, content=content))
 

	
 
        tip = imc.commit(message=message,
 
                         author=author,
 
                         parents=parents,
 
                         branch=parent_cs.branch)
 

	
 
        self.mark_for_invalidation(repo.repo_name)
 
        if trigger_push_hook:
 
            self._handle_push(scm_instance,
 
                              username=user.username,
 
                              action='push_local',
 
                              repo_name=repo.repo_name,
 
                              revisions=[tip.raw_id])
 
        return tip
 

	
 
    def get_unread_journal(self):
 
        return self.sa.query(UserLog).count()
 

	
 
    def get_repo_landing_revs(self, repo=None):
 
        """
 
        Generates select options with tags, branches and bookmarks (for hg only),
 
        grouped by type
 

	
 
        :param repo:
 
        """
 

	
 
        hist_l = []
 
        choices = []
 
        repo = self.__get_repo(repo)
 
        hist_l.append(['rev:tip', _('latest tip')])
 
        choices.append('rev:tip')
 
        if repo is None:
 
            return choices, hist_l
 

	
 
        repo = repo.scm_instance
 

	
 
        branches_group = ([(u'branch:%s' % k, k) for k, v in
 
                           repo.branches.iteritems()], _("Branches"))
 
        hist_l.append(branches_group)
 
        choices.extend([x[0] for x in branches_group[0]])
 

	
 
        if repo.alias == 'hg':
 
            bookmarks_group = ([(u'book:%s' % k, k) for k, v in
 
                                repo.bookmarks.iteritems()], _("Bookmarks"))
 
            hist_l.append(bookmarks_group)
 
            choices.extend([x[0] for x in bookmarks_group[0]])
 

	
 
        tags_group = ([(u'tag:%s' % k, k) for k, v in
 
                       repo.tags.iteritems()], _("Tags"))
 
        hist_l.append(tags_group)
 
        choices.extend([x[0] for x in tags_group[0]])
 

	
 
        return choices, hist_l
 

	
 
    def install_git_hooks(self, repo, force_create=False):
 
        """
 
        Creates a kallithea hook inside a git repository
 

	
 
        :param repo: Instance of VCS repo
 
        :param force_create: Create even if same name hook exists
 
        """
 

	
 
        loc = jn(repo.path, 'hooks')
 
        if not repo.bare:
 
            loc = jn(repo.path, '.git', 'hooks')
 
        if not os.path.isdir(loc):
 
            os.makedirs(loc)
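        # for bare repositories the hooks live directly in <repo>/hooks,
        # for non-bare checkouts in <repo>/.git/hooks (chosen above)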
 

	
 
        tmpl_post = pkg_resources.resource_string(
 
            'kallithea', jn('config', 'post_receive_tmpl.py')
 
        )
 
        tmpl_pre = pkg_resources.resource_string(
 
            'kallithea', jn('config', 'pre_receive_tmpl.py')
 
        )
 

	
 
        for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
 
            _hook_file = jn(loc, '%s-receive' % h_type)
 
            has_hook = False
 
            log.debug('Installing git hook in repo %s', repo)
 
            if os.path.exists(_hook_file):
 
                # let's take a look at this hook, maybe it's a Kallithea one?
 
                log.debug('hook exists, checking if it is from kallithea')
 
                with open(_hook_file, 'rb') as f:
 
                    data = f.read()
 
                    matches = re.compile(r'(?:%s)\s*=\s*(.*)'
 
                                         % 'KALLITHEA_HOOK_VER').search(data)
 
                    if matches:
 
                        try:
 
                            ver = matches.groups()[0]
 
                            log.debug('got %s it is kallithea', ver)
 
                            has_hook = True
 
                        except Exception:
 
                            log.error(traceback.format_exc())
 
            else:
 
                # there is no hook in this dir, so we want to create one
 
                has_hook = True
 

	
 
            if has_hook or force_create:
 
                log.debug('writing %s hook file !', h_type)
 
                try:
 
                    with open(_hook_file, 'wb') as f:
 
                        tmpl = tmpl.replace('_TMPL_', kallithea.__version__)
 
                        f.write(tmpl)
 
                    os.chmod(_hook_file, 0755)
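                    # mode 0755 makes the hook executable so git will actually run it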
 
                except IOError as e:
 
                    log.error('error writing %s: %s', _hook_file, e)
 
            else:
 
                log.debug('skipping writing hook file')
 

	
 
def AvailableRepoGroupChoices(top_perms, repo_group_perms, extras=()):
 
    """Return group_id,string tuples with choices for all the repo groups where
 
    the user has the necessary permissions.
 

	
 
    Top level is -1.
 
    """
 
    groups = RepoGroup.query().all()
 
    if HasPermissionAll('hg.admin')('available repo groups'):
 
        groups.append(None)
 
    else:
 
        groups = list(RepoGroupList(groups, perm_set=repo_group_perms))
 
        if top_perms and HasPermissionAny(*top_perms)('available repo groups'):
 
            groups.append(None)
 
        for extra in extras:
 
            if not any(rg == extra for rg in groups):
 
                groups.append(extra)
 
    return RepoGroup.groups_choices(groups=groups)
kallithea/tests/other/manual_test_vcs_operations.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.tests.other.manual_test_vcs_operations
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Test suite for making push/pull operations.
 

	
 
Run it in two terminals::
 
 paster serve kallithea/tests/test.ini
 
 KALLITHEA_WHOOSH_TEST_DISABLE=1 KALLITHEA_NO_TMP_PATH=1 nosetests kallithea/tests/other/manual_test_vcs_operations.py
 

	
 
You must have git > 1.8.1 for tests to work fine
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Dec 30, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 

	
 
"""
 

	
 
import re
 
import tempfile
 
import time
 
from os.path import join as jn
 

	
 
from tempfile import _RandomNameSequence
 
from subprocess import Popen, PIPE
 

	
 
from kallithea.tests import *
 
from kallithea.model.db import User, Repository, UserIpMap, CacheInvalidation
 
from kallithea.model.meta import Session
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.user import UserModel
 

	
 
DEBUG = True
 
HOST = '127.0.0.1:4999'  # test host
 

	
 

	
 
class Command(object):
 

	
 
    def __init__(self, cwd):
 
        self.cwd = cwd
 

	
 
    def execute(self, cmd, *args):
 
        """
 
        Runs command on the system with given ``args``.
 
        """
 

	
 
        command = cmd + ' ' + ' '.join(args)
 
        if DEBUG:
 
            print '*** CMD %s ***' % command
 
        p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd)
 
        stdout, stderr = p.communicate()
 
        if DEBUG:
 
            print 'stdout:', repr(stdout)
 
            print 'stderr:', repr(stderr)
 
        return stdout, stderr
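    # Usage in the tests below (illustrative):
    #   stdout, stderr = Command('/tmp').execute('hg clone', clone_url)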
 

	
 

	
 
def _get_tmp_dir():
 
    return tempfile.mkdtemp(prefix='rc_integration_test')
 

	
 

	
 
def _construct_url(repo, dest=None, **kwargs):
 
    if dest is None:
 
        #make temp clone
 
        dest = _get_tmp_dir()
 
    params = {
 
        'user': TEST_USER_ADMIN_LOGIN,
 
        'passwd': TEST_USER_ADMIN_PASS,
 
        'host': HOST,
 
        'cloned_repo': repo,
 
        'dest': dest
 
    }
 
    params.update(**kwargs)
 
    if params['user'] and params['passwd']:
 
        _url = 'http://%(user)s:%(passwd)s@%(host)s/%(cloned_repo)s %(dest)s' % params
 
    else:
 
        _url = 'http://%(host)s/%(cloned_repo)s %(dest)s' % params
 
    return _url
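# The returned string is "<clone url> <destination dir>"; with credentials it
# looks roughly like (values illustrative):
#   http://user:passwd@127.0.0.1:4999/repo_name /tmp/rc_integration_testXYZ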
 

	
 

	
 
def _add_files_and_push(vcs, DEST, **kwargs):
 
    """
 
    Generate some files, add them to the DEST repo and push back
 
    vcs is git or hg and defines what VCS we want to make those files for
 

	
 
    :param vcs:
 
    :param DEST:
 
    """
 
    # commit some stuff into this repo
 
    cwd = path = jn(DEST)
 
    #added_file = jn(path, '%ssetupążźć.py' % _RandomNameSequence().next())
 
    added_file = jn(path, '%ssetup.py' % _RandomNameSequence().next())
 
    Command(cwd).execute('touch %s' % added_file)
 
    Command(cwd).execute('%s add %s' % (vcs, added_file))
 

	
 
    for i in xrange(kwargs.get('files_no', 3)):
 
        cmd = """echo 'added_line%s' >> %s""" % (i, added_file)
 
        Command(cwd).execute(cmd)
 
        author_str = 'User ǝɯɐᴎ <me@example.com>'
 
        if vcs == 'hg':
 
            cmd = """hg commit -m 'commited new %s' -u '%s' %s """ % (
 
                i, author_str, added_file
 
            )
 
        elif vcs == 'git':
 
            cmd = """EMAIL="me@example.com" git commit -m 'commited new %s' --author '%s' %s """ % (
 
                i, author_str, added_file
 
            )
 
        Command(cwd).execute(cmd)
 

	
 
    # PUSH it back
 
    _REPO = None
 
    if vcs == 'hg':
 
        _REPO = HG_REPO
 
    elif vcs == 'git':
 
        _REPO = GIT_REPO
 

	
 
    kwargs['dest'] = ''
 
    clone_url = _construct_url(_REPO, **kwargs)
 
    if 'clone_url' in kwargs:
 
        clone_url = kwargs['clone_url']
 
    stdout = stderr = None
 
    if vcs == 'hg':
 
        stdout, stderr = Command(cwd).execute('hg push --verbose', clone_url)
 
    elif vcs == 'git':
 
        stdout, stderr = Command(cwd).execute('git push --verbose', clone_url + " master")
 

	
 
    return stdout, stderr
 

	
 

	
 
def set_anonymous_access(enable=True):
 
    user = User.get_by_username(User.DEFAULT_USER)
 
    user.active = enable
 
    Session().add(user)
 
    Session().commit()
 
    print '\tanonymous access is now:', enable
 
    if enable != User.get_by_username(User.DEFAULT_USER).active:
 
        raise Exception('Cannot set anonymous access')
 

	
 

	
 
#==============================================================================
 
# TESTS
 
#==============================================================================
 

	
 

	
 
def _check_proper_git_push(stdout, stderr):
 
    # git push writes its progress and errors to stderr, so check there
 
    assert 'fatal' not in stderr
 
    assert 'rejected' not in stderr
 
    assert 'Pushing to' in stderr
 
    assert 'master -> master' in stderr
 

	
 

	
 
class TestVCSOperations(BaseTestCase):
 

	
 
    @classmethod
 
    def setup_class(cls):
 
        #DISABLE ANONYMOUS ACCESS
 
        set_anonymous_access(False)
 

	
 
    def setUp(self):
 
        r = Repository.get_by_repo_name(GIT_REPO)
 
        Repository.unlock(r)
 
        r.enable_locking = False
 
        Session().add(r)
 
        Session().commit()
 

	
 
        r = Repository.get_by_repo_name(HG_REPO)
 
        Repository.unlock(r)
 
        r.enable_locking = False
 
        Session().add(r)
 
        Session().commit()
 

	
 
    def test_clone_hg_repo_by_admin(self):
 
        clone_url = _construct_url(HG_REPO)
 
        stdout, stderr = Command('/tmp').execute('hg clone', clone_url)
 

	
 
        assert 'requesting all changes' in stdout
 
        assert 'adding changesets' in stdout
 
        assert 'adding manifests' in stdout
 
        assert 'adding file changes' in stdout
 

	
 
        assert stderr == ''
 

	
 
    def test_clone_git_repo_by_admin(self):
 
        clone_url = _construct_url(GIT_REPO)
 
        stdout, stderr = Command('/tmp').execute('git clone', clone_url)
 

	
 
        assert 'Cloning into' in stdout + stderr
 
        assert stderr == '' or stdout == ''
 

	
 
    def test_clone_wrong_credentials_hg(self):
 
        clone_url = _construct_url(HG_REPO, passwd='bad!')
 
        stdout, stderr = Command('/tmp').execute('hg clone', clone_url)
 
        assert 'abort: authorization failed' in stderr
 

	
 
    def test_clone_wrong_credentials_git(self):
 
        clone_url = _construct_url(GIT_REPO, passwd='bad!')
 
        stdout, stderr = Command('/tmp').execute('git clone', clone_url)
 
        assert 'fatal: Authentication failed' in stderr
 

	
 
    def test_clone_git_dir_as_hg(self):
 
        clone_url = _construct_url(GIT_REPO)
 
        stdout, stderr = Command('/tmp').execute('hg clone', clone_url)
 
        assert 'HTTP Error 404: Not Found' in stderr
 

	
 
    def test_clone_hg_repo_as_git(self):
 
        clone_url = _construct_url(HG_REPO)
 
        stdout, stderr = Command('/tmp').execute('git clone', clone_url)
 
        assert 'not found' in stderr
 

	
 
    def test_clone_non_existing_path_hg(self):
 
        clone_url = _construct_url('trololo')
 
        stdout, stderr = Command('/tmp').execute('hg clone', clone_url)
 
        assert 'HTTP Error 404: Not Found' in stderr
 

	
 
    def test_clone_non_existing_path_git(self):
 
        clone_url = _construct_url('trololo')
 
        stdout, stderr = Command('/tmp').execute('git clone', clone_url)
 
        assert 'not found' in stderr
 

	
 
    def test_push_new_file_hg(self):
 
        DEST = _get_tmp_dir()
 
        clone_url = _construct_url(HG_REPO, dest=DEST)
 
        stdout, stderr = Command('/tmp').execute('hg clone', clone_url)
 

	
 
        stdout, stderr = _add_files_and_push('hg', DEST)
 

	
 
        assert 'pushing to' in stdout
 
        assert 'Repository size' in stdout
 
        assert 'Last revision is now' in stdout
 

	
 
    def test_push_new_file_git(self):
 
        DEST = _get_tmp_dir()
 
        clone_url = _construct_url(GIT_REPO, dest=DEST)
 
        stdout, stderr = Command('/tmp').execute('git clone', clone_url)
 

	
 
        # commit some stuff into this repo
 
        stdout, stderr = _add_files_and_push('git', DEST)
 

	
 
        print [(x.repo_full_path,x.repo_path) for x in Repository.get_all()]
 
        _check_proper_git_push(stdout, stderr)
 

	
 
    def test_push_invalidates_cache_hg(self):
 
        key = CacheInvalidation.query().filter(CacheInvalidation.cache_key
 
                                               ==HG_REPO).scalar()
 
        if not key:
 
            key = CacheInvalidation(HG_REPO, HG_REPO)
 

	
 
        key.cache_active = True
 
        Session().add(key)
 
        Session().commit()
 

	
 
        DEST = _get_tmp_dir()
 
        clone_url = _construct_url(HG_REPO, dest=DEST)
 
        stdout, stderr = Command('/tmp').execute('hg clone', clone_url)
 

	
 
        stdout, stderr = _add_files_and_push('hg', DEST, files_no=1)
 

	
 
        key = CacheInvalidation.query().filter(CacheInvalidation.cache_key
-                                               ==HG_REPO).one()
-        self.assertEqual(key.cache_active, False)
+                                               ==HG_REPO).all()
+        self.assertEqual(key, [])
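        # i.e. the push now removes the live-cache rows for HG_REPO entirely,
        # instead of leaving behind a row with cache_active set to False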
 

	
 
    def test_push_invalidates_cache_git(self):
 
        key = CacheInvalidation.query().filter(CacheInvalidation.cache_key
 
                                               ==GIT_REPO).scalar()
 
        if not key:
 
            key = CacheInvalidation(GIT_REPO, GIT_REPO)
 

	
 
        key.cache_active = True
 
        Session().add(key)
 
        Session().commit()
 

	
 
        DEST = _get_tmp_dir()
 
        clone_url = _construct_url(GIT_REPO, dest=DEST)
 
        stdout, stderr = Command('/tmp').execute('git clone', clone_url)
 

	
 
        # commit some stuff into this repo
 
        stdout, stderr = _add_files_and_push('git', DEST, files_no=1)
 
        _check_proper_git_push(stdout, stderr)
 

	
 
        key = CacheInvalidation.query().filter(CacheInvalidation.cache_key
-                                               ==GIT_REPO).one()
-        print CacheInvalidation.get_all()
-        self.assertEqual(key.cache_active, False)
+                                               ==GIT_REPO).all()
+        self.assertEqual(key, [])
 

	
 
    def test_push_wrong_credentials_hg(self):
 
        DEST = _get_tmp_dir()
 
        clone_url = _construct_url(HG_REPO, dest=DEST)
 
        stdout, stderr = Command('/tmp').execute('hg clone', clone_url)
 

	
 
        stdout, stderr = _add_files_and_push('hg', DEST, user='bad',
 
                                             passwd='name')
 

	
 
        assert 'abort: authorization failed' in stderr
 

	
 
    def test_push_wrong_credentials_git(self):
 
        DEST = _get_tmp_dir()
 
        clone_url = _construct_url(GIT_REPO, dest=DEST)
 
        stdout, stderr = Command('/tmp').execute('git clone', clone_url)
 

	
 
        stdout, stderr = _add_files_and_push('git', DEST, user='bad',
 
                                             passwd='name')
 

	
 
        assert 'fatal: Authentication failed' in stderr
 

	
 
    def test_push_back_to_wrong_url_hg(self):
 
        DEST = _get_tmp_dir()
 
        clone_url = _construct_url(HG_REPO, dest=DEST)
 
        stdout, stderr = Command('/tmp').execute('hg clone', clone_url)
 

	
 
        stdout, stderr = _add_files_and_push('hg', DEST,
 
                                    clone_url='http://%s/tmp' % HOST)
 

	
 
        assert 'HTTP Error 404: Not Found' in stderr
 

	
 
    def test_push_back_to_wrong_url_git(self):
 
        DEST = _get_tmp_dir()
 
        clone_url = _construct_url(GIT_REPO, dest=DEST)
 
        stdout, stderr = Command('/tmp').execute('git clone', clone_url)
 

	
 
        stdout, stderr = _add_files_and_push('git', DEST,
 
                                    clone_url='http://%s/tmp' % HOST)
 

	
 
        assert 'not found' in stderr
 

	
 
    def test_clone_and_create_lock_hg(self):
 
        # enable locking
 
        r = Repository.get_by_repo_name(HG_REPO)
 
        r.enable_locking = True
 
        Session().add(r)
 
        Session().commit()
 
        # clone
 
        clone_url = _construct_url(HG_REPO)
 
        stdout, stderr = Command('/tmp').execute('hg clone', clone_url)
 

	
 
        #check if lock was made
 
        r = Repository.get_by_repo_name(HG_REPO)
 
        assert r.locked[0] == User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id
 

	
 
    def test_clone_and_create_lock_git(self):
 
        # enable locking
 
        r = Repository.get_by_repo_name(GIT_REPO)
 
        r.enable_locking = True
 
        Session().add(r)
 
        Session().commit()
 
        # clone
 
        clone_url = _construct_url(GIT_REPO)
 
        stdout, stderr = Command('/tmp').execute('git clone', clone_url)
 

	
 
        #check if lock was made
 
        r = Repository.get_by_repo_name(GIT_REPO)
 
        assert r.locked[0] == User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id
 

	
 
    def test_clone_after_repo_was_locked_hg(self):
 
        #lock repo
 
        r = Repository.get_by_repo_name(HG_REPO)
 
        Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id)
 
        #pull fails since repo is locked
 
        clone_url = _construct_url(HG_REPO)
 
        stdout, stderr = Command('/tmp').execute('hg clone', clone_url)
 
        msg = ("""abort: HTTP Error 423: Repository `%s` locked by user `%s`"""
 
                % (HG_REPO, TEST_USER_ADMIN_LOGIN))
 
        assert msg in stderr
 

	
 
    def test_clone_after_repo_was_locked_git(self):
 
        #lock repo
 
        r = Repository.get_by_repo_name(GIT_REPO)
 
        Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id)
 
        #pull fails since repo is locked
 
        clone_url = _construct_url(GIT_REPO)
 
        stdout, stderr = Command('/tmp').execute('git clone', clone_url)
 
        msg = ("""The requested URL returned error: 423""")
 
        assert msg in stderr
 

	
 
    def test_push_on_locked_repo_by_other_user_hg(self):
 
        #clone some temp
 
        DEST = _get_tmp_dir()
 
        clone_url = _construct_url(HG_REPO, dest=DEST)
 
        stdout, stderr = Command('/tmp').execute('hg clone', clone_url)
 

	
 
        #lock repo
 
        r = Repository.get_by_repo_name(HG_REPO)
 
        # let this user actually push !
 
        RepoModel().grant_user_permission(repo=r, user=TEST_USER_REGULAR_LOGIN,
 
                                          perm='repository.write')
 
        Session().commit()
 
        Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id)
 

	
 
        #push fails repo is locked by other user !
 
        stdout, stderr = _add_files_and_push('hg', DEST,
 
                                             user=TEST_USER_REGULAR_LOGIN,
 
                                             passwd=TEST_USER_REGULAR_PASS)
 
        msg = ("""abort: HTTP Error 423: Repository `%s` locked by user `%s`"""
 
                % (HG_REPO, TEST_USER_ADMIN_LOGIN))
 
        assert msg in stderr
 

	
 
    def test_push_on_locked_repo_by_other_user_git(self):
 
        #clone some temp
 
        DEST = _get_tmp_dir()
 
        clone_url = _construct_url(GIT_REPO, dest=DEST)
 
        stdout, stderr = Command('/tmp').execute('git clone', clone_url)
 

	
 
        #lock repo
 
        r = Repository.get_by_repo_name(GIT_REPO)
 
        # let this user actually push !
 
        RepoModel().grant_user_permission(repo=r, user=TEST_USER_REGULAR_LOGIN,
 
                                          perm='repository.write')
 
        Session().commit()
 
        Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id)
 

	
 
        #push fails repo is locked by other user !
 
        stdout, stderr = _add_files_and_push('git', DEST,
 
                                             user=TEST_USER_REGULAR_LOGIN,
 
                                             passwd=TEST_USER_REGULAR_PASS)
 
        err = 'Repository `%s` locked by user `%s`' % (GIT_REPO, TEST_USER_ADMIN_LOGIN)
 
        assert err in stderr
 

	
 
        #TODO: fix this somehow later for Git; even when we return 423 to it,
        #Git makes another request and we then fail there with 405
 

	
 
        msg = ("""abort: HTTP Error 423: Repository `%s` locked by user `%s`"""
 
                % (GIT_REPO, TEST_USER_ADMIN_LOGIN))
 
        #msg = "405 Method Not Allowed"
 
        #assert msg in stderr
 

	
 
    def test_push_unlocks_repository_hg(self):
 
        # enable locking
 
        r = Repository.get_by_repo_name(HG_REPO)
 
        r.enable_locking = True
 
        Session().add(r)
 
        Session().commit()
 
        #clone some temp
 
        DEST = _get_tmp_dir()
 
        clone_url = _construct_url(HG_REPO, dest=DEST)
 
        stdout, stderr = Command('/tmp').execute('hg clone', clone_url)
 

	
 
        #check for lock repo after clone
 
        r = Repository.get_by_repo_name(HG_REPO)
 
        uid = User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id
 
        assert r.locked[0] == uid
 

	
 
        #push is ok and repo is now unlocked
 
        stdout, stderr = _add_files_and_push('hg', DEST)
 
        assert ('remote: Released lock on repo `%s`' % HG_REPO) in stdout
 
        #we need to cleanup the Session Here !
 
        Session.remove()
 
        r = Repository.get_by_repo_name(HG_REPO)
 
        assert r.locked == [None, None]
 

	
 
    #TODO: fix me ! somehow during tests hooks don't get called on Git
 
    def test_push_unlocks_repository_git(self):
 
        # enable locking
 
        r = Repository.get_by_repo_name(GIT_REPO)
 
        r.enable_locking = True
 
        Session().add(r)
 
        Session().commit()
 
        #clone some temp
 
        DEST = _get_tmp_dir()
 
        clone_url = _construct_url(GIT_REPO, dest=DEST)
 
        stdout, stderr = Command('/tmp').execute('git clone', clone_url)
 

	
 
        #check for lock repo after clone
 
        r = Repository.get_by_repo_name(GIT_REPO)
 
        assert r.locked[0] == User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id
 

	
 
        #push is ok and repo is now unlocked
 
        stdout, stderr = _add_files_and_push('git', DEST)
 
        _check_proper_git_push(stdout, stderr)
 

	
 
        #assert ('remote: Released lock on repo `%s`' % GIT_REPO) in stdout
 
        #we need to cleanup the Session Here !
 
        Session.remove()
 
        r = Repository.get_by_repo_name(GIT_REPO)
 
        assert r.locked == [None, None]
 

	
 
    def test_ip_restriction_hg(self):
 
        user_model = UserModel()
 
        try:
 
            user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
 
            Session().commit()
 
            clone_url = _construct_url(HG_REPO)
 
            stdout, stderr = Command('/tmp').execute('hg clone', clone_url)
 
            assert 'abort: HTTP Error 403: Forbidden' in stderr
 
        finally:
 
            #release IP restrictions
 
            for ip in UserIpMap.getAll():
 
                UserIpMap.delete(ip.ip_id)
 
            Session().commit()
 

	
 
        time.sleep(2)
 
        clone_url = _construct_url(HG_REPO)
 
        stdout, stderr = Command('/tmp').execute('hg clone', clone_url)
 

	
 
        assert 'requesting all changes' in stdout
 
        assert 'adding changesets' in stdout
 
        assert 'adding manifests' in stdout
 
        assert 'adding file changes' in stdout
 

	
 
        assert stderr == ''
 

	
 
    def test_ip_restriction_git(self):
 
        user_model = UserModel()
 
        try:
 
            user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
 
            Session().commit()
 
            clone_url = _construct_url(GIT_REPO)
 
            stdout, stderr = Command('/tmp').execute('git clone', clone_url)
 
            # The message apparently changed in Git 1.8.3, so match it loosely.
 
            assert re.search(r'\b403\b', stderr)
 
        finally:
 
            #release IP restrictions
 
            for ip in UserIpMap.getAll():
 
                UserIpMap.delete(ip.ip_id)
 
            Session().commit()
 

	
 
        time.sleep(2)
 
        clone_url = _construct_url(GIT_REPO)
 
        stdout, stderr = Command('/tmp').execute('git clone', clone_url)
 

	
 
        assert 'Cloning into' in stdout + stderr
 
        assert stderr == '' or stdout == ''
0 comments (0 inline, 0 general)