Changeset - 54df936a9bd3
[Not reviewed]
default
Na'Tosha Bard - 2014-09-19 10:52:59
natosha@unity3d.com
Fix typos in comments
5 files changed with 9 insertions and 9 deletions:
0 comments (0 inline, 0 general)
kallithea/controllers/compare.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.compare
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
compare controller for pylons showing differences between two
 
repos, branches, bookmarks or tips
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: May 6, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 

	
 
import logging
 
import re
 

	
 
from webob.exc import HTTPBadRequest
 
from pylons import request, tmpl_context as c, url
 
from pylons.controllers.util import redirect
 
from pylons.i18n.translation import _
 

	
 
from kallithea.lib.vcs.utils.hgcompat import unionrepo
 
from kallithea.lib import helpers as h
 
from kallithea.lib.base import BaseRepoController, render
 
from kallithea.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
 
from kallithea.lib import diffs
 
from kallithea.model.db import Repository
 
from kallithea.lib.diffs import LimitedDiffContainer
 
from kallithea.controllers.changeset import _ignorews_url,\
 
    _context_url, get_line_ctx, get_ignore_ws
 
from kallithea.lib.graphmod import graph_data
 
from kallithea.lib.compat import json
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class CompareController(BaseRepoController):
 

	
 
    def __before__(self):
 
        super(CompareController, self).__before__()
 

	
 
    @staticmethod
 
    def _get_changesets(alias, org_repo, org_rev, other_repo, other_rev):
 
        """
 
        Returns lists of changesets that can be merged from org_repo@org_rev
 
        to other_repo@other_rev
 
        ... and the other way
 
        ... and the ancestor that would be used for merge
 

	
 
-        :param org_repo: repo object, that is most likely the orginal repo we forked from
+        :param org_repo: repo object, that is most likely the original repo we forked from
         :param org_rev: the revision we want our compare to be made
-        :param other_repo: repo object, mostl likely the fork of org_repo. It hass
+        :param other_repo: repo object, most likely the fork of org_repo. It has
 
            all changesets that we need to obtain
 
        :param other_rev: revision we want our compare to be made on other_repo
 
        """
 
        ancestor = None
 
        if org_rev == other_rev:
 
            org_changesets = []
 
            other_changesets = []
 
            ancestor = org_rev
 

	
 
        elif alias == 'hg':
 
            #case two independent repos
 
            if org_repo != other_repo:
 
                hgrepo = unionrepo.unionrepository(other_repo.baseui,
 
                                                   other_repo.path,
 
                                                   org_repo.path)
 
                # all ancestors of other_rev will be in other_repo and
 
                # rev numbers from hgrepo can be used in other_repo - org_rev ancestors cannot
 

	
 
            # no remote compare - do it on the same repository
 
            else:
 
                hgrepo = other_repo._repo
 

	
 
            ancestors = hgrepo.revs("ancestor(id(%s), id(%s))", org_rev, other_rev)
 
            if ancestors:
 
                # pick arbitrary ancestor - but there is usually only one
 
                ancestor = hgrepo[ancestors[0]].hex()
 

	
 
            other_revs = hgrepo.revs("ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
 
                                     other_rev, org_rev, org_rev)
 
            other_changesets = [other_repo.get_changeset(rev) for rev in other_revs]
 
            org_revs = hgrepo.revs("ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
 
                                   org_rev, other_rev, other_rev)
 

	
 
            org_changesets = [org_repo.get_changeset(hgrepo[rev].hex()) for rev in org_revs]
 

	
 
        elif alias == 'git':
 
            if org_repo != other_repo:
 
                from dulwich.repo import Repo
 
                from dulwich.client import SubprocessGitClient
 

	
 
                gitrepo = Repo(org_repo.path)
 
                SubprocessGitClient(thin_packs=False).fetch(other_repo.path, gitrepo)
 

	
 
                gitrepo_remote = Repo(other_repo.path)
 
                SubprocessGitClient(thin_packs=False).fetch(org_repo.path, gitrepo_remote)
 

	
 
                revs = []
 
                for x in gitrepo_remote.get_walker(include=[other_rev],
 
                                                   exclude=[org_rev]):
 
                    revs.append(x.commit.id)
 

	
 
                other_changesets = [other_repo.get_changeset(rev) for rev in reversed(revs)]
 
                if other_changesets:
 
                    ancestor = other_changesets[0].parents[0].raw_id
 
                else:
 
                    # no changesets from other repo, ancestor is the other_rev
 
                    ancestor = other_rev
 

	
 
            else:
 
                so, se = org_repo.run_git_command(
 
                    'log --reverse --pretty="format: %%H" -s %s..%s'
 
                        % (org_rev, other_rev)
 
                )
 
                other_changesets = [org_repo.get_changeset(cs)
 
                              for cs in re.findall(r'[0-9a-fA-F]{40}', so)]
 
                so, se = org_repo.run_git_command(
 
                    'merge-base %s %s' % (org_rev, other_rev)
 
                )
 
                ancestor = re.findall(r'[0-9a-fA-F]{40}', so)[0]
 
            org_changesets = []
 

	
 
        else:
 
            raise Exception('Bad alias only git and hg is allowed')
 

	
 
        return other_changesets, org_changesets, ancestor
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def index(self, repo_name):
 
        c.compare_home = True
 
        org_repo = c.db_repo.repo_name
 
        other_repo = request.GET.get('other_repo', org_repo)
 
        c.a_repo = Repository.get_by_repo_name(org_repo)
 
        c.cs_repo = Repository.get_by_repo_name(other_repo)
 
        c.a_ref_name = c.cs_ref_name = _('Select changeset')
 
        return render('compare/compare_diff.html')
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def compare(self, repo_name, org_ref_type, org_ref_name, other_ref_type, other_ref_name):
 
        org_repo = c.db_repo.repo_name
 
        other_repo = request.GET.get('other_repo', org_repo)
 
        # If merge is True:
 
        #   Show what org would get if merged with other:
 
        #   List changesets that are ancestors of other but not of org.
 
        #   New changesets in org are thus ignored.
 
        #   Diff will be from common ancestor, and merges of org to other will thus be ignored.
 
        # If merge is False:
 
        #   Make a raw diff from org to other, no matter if related or not.
 
        #   Changesets in one and not in the other will be ignored
 
        merge = bool(request.GET.get('merge'))
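        # (any non-empty 'merge' query value, e.g. '?merge=1', enables merge mode;
        # a missing or empty parameter yields False)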
 
        # fulldiff disables cut_off_limit
 
        c.fulldiff = request.GET.get('fulldiff')
 
        # partial uses compare_cs.html template directly
 
        partial = request.environ.get('HTTP_X_PARTIAL_XHR')
 
        # as_form puts hidden input field with changeset revisions
 
        c.as_form = partial and request.GET.get('as_form')
 
        # swap url for compare_diff page - never partial and never as_form
 
        c.swap_url = h.url('compare_url',
 
            repo_name=other_repo,
 
            org_ref_type=other_ref_type, org_ref_name=other_ref_name,
 
            other_repo=org_repo,
 
            other_ref_type=org_ref_type, other_ref_name=org_ref_name,
 
            merge=merge or '')
 

	
 
        # set callbacks for generating markup for icons
 
        c.ignorews_url = _ignorews_url
 
        c.context_url = _context_url
 
        ignore_whitespace = request.GET.get('ignorews') == '1'
 
        line_context = request.GET.get('context', 3)
 

	
 
        org_repo = Repository.get_by_repo_name(org_repo)
 
        other_repo = Repository.get_by_repo_name(other_repo)
 

	
 
        if org_repo is None:
 
            msg = 'Could not find org repo %s' % org_repo
 
            log.error(msg)
 
            h.flash(msg, category='error')
 
            return redirect(url('compare_home', repo_name=c.repo_name))
 

	
 
        if other_repo is None:
 
            msg = 'Could not find other repo %s' % other_repo
 
            log.error(msg)
 
            h.flash(msg, category='error')
 
            return redirect(url('compare_home', repo_name=c.repo_name))
 

	
 
        if org_repo.scm_instance.alias != other_repo.scm_instance.alias:
 
            msg = 'compare of two different kind of remote repos not available'
 
            log.error(msg)
 
            h.flash(msg, category='error')
 
            return redirect(url('compare_home', repo_name=c.repo_name))
 

	
 
        c.a_rev = self._get_ref_rev(org_repo, org_ref_type, org_ref_name,
 
            returnempty=True)
 
        c.cs_rev = self._get_ref_rev(other_repo, other_ref_type, other_ref_name)
 

	
 
        c.compare_home = False
 
        c.a_repo = org_repo
 
        c.a_ref_name = org_ref_name
 
        c.a_ref_type = org_ref_type
 
        c.cs_repo = other_repo
 
        c.cs_ref_name = other_ref_name
 
        c.cs_ref_type = other_ref_type
 

	
 
        c.cs_ranges, c.cs_ranges_org, c.ancestor = self._get_changesets(
 
            org_repo.scm_instance.alias, org_repo.scm_instance, c.a_rev,
 
            other_repo.scm_instance, c.cs_rev)
 
        raw_ids = [x.raw_id for x in c.cs_ranges]
 
        c.cs_comments = other_repo.get_comments(raw_ids)
 
        c.statuses = other_repo.statuses(raw_ids)
 

	
 
        revs = [ctx.revision for ctx in reversed(c.cs_ranges)]
 
        c.jsdata = json.dumps(graph_data(c.cs_repo.scm_instance, revs))
 

	
 
        if partial:
 
            return render('compare/compare_cs.html')
 
        if merge and c.ancestor:
 
            # case we want a simple diff without incoming changesets,
 
            # previewing what will be merged.
 
            # Make the diff on the other repo (which is known to have other_rev)
 
            log.debug('Using ancestor %s as rev1 instead of %s'
 
                      % (c.ancestor, c.a_rev))
 
            rev1 = c.ancestor
 
            org_repo = other_repo
 
        else: # comparing tips, not necessarily linearly related
 
            if merge:
 
                log.error('Unable to find ancestor revision')
 
            if org_repo != other_repo:
 
                # TODO: we could do this by using hg unionrepo
 
                log.error('cannot compare across repos %s and %s', org_repo, other_repo)
 
                h.flash(_('Cannot compare repositories without using common ancestor'), category='error')
 
                raise HTTPBadRequest
 
            rev1 = c.a_rev
 

	
 
        diff_limit = self.cut_off_limit if not c.fulldiff else None
 

	
 
        log.debug('running diff between %s and %s in %s'
 
                  % (rev1, c.cs_rev, org_repo.scm_instance.path))
 
        txtdiff = org_repo.scm_instance.get_diff(rev1=rev1, rev2=c.cs_rev,
 
                                      ignore_whitespace=ignore_whitespace,
 
                                      context=line_context)
 

	
 
        diff_processor = diffs.DiffProcessor(txtdiff or '', format='gitdiff',
 
                                             diff_limit=diff_limit)
 
        _parsed = diff_processor.prepare()
 

	
 
        c.limited_diff = False
 
        if isinstance(_parsed, LimitedDiffContainer):
 
            c.limited_diff = True
 

	
 
        c.files = []
 
        c.changes = {}
 
        c.lines_added = 0
 
        c.lines_deleted = 0
 
        for f in _parsed:
 
            st = f['stats']
 
            if not st['binary']:
 
                c.lines_added += st['added']
 
                c.lines_deleted += st['deleted']
 
            fid = h.FID('', f['filename'])
 
            c.files.append([fid, f['operation'], f['filename'], f['stats']])
 
            htmldiff = diff_processor.as_html(enable_comments=False, parsed_lines=[f])
 
            c.changes[fid] = [f['operation'], f['filename'], htmldiff]
 

	
 
        return render('compare/compare_diff.html')
kallithea/model/db.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.db
 
~~~~~~~~~~~~~~~~~~
 

	
 
Database Models for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 08, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import os
 
import time
 
import logging
 
import datetime
 
import traceback
 
import hashlib
 
import collections
 
import functools
 

	
 
from sqlalchemy import *
 
from sqlalchemy.ext.hybrid import hybrid_property
 
from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
 
from beaker.cache import cache_region, region_invalidate
 
from webob.exc import HTTPNotFound
 

	
 
from pylons.i18n.translation import lazy_ugettext as _
 

	
 
from kallithea import DB_PREFIX
 
from kallithea.lib.vcs import get_backend
 
from kallithea.lib.vcs.utils.helpers import get_scm
 
from kallithea.lib.vcs.exceptions import VCSError
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 

	
 
from kallithea.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
 
    safe_unicode, remove_prefix, time_to_datetime, aslist, Optional, safe_int, \
 
    get_clone_url, urlreadable
 
from kallithea.lib.compat import json
 
from kallithea.lib.caching_query import FromCache
 

	
 
from kallithea.model.meta import Base, Session
 

	
 
URL_SEP = '/'
 
log = logging.getLogger(__name__)
 

	
 
#==============================================================================
 
# BASE CLASSES
 
#==============================================================================
 

	
 
_hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()
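# Hedged example: _hash_key(u'admin') -> '21232f297a57a5a743894a0e4a801fc3', the
# md5 hex digest used below in cache keys such as "get_user_<digest>".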
 

	
 

	
 
class BaseModel(object):
 
    """
 
    Base Model for all classes
 
    """
 

	
 
    @classmethod
 
    def _get_keys(cls):
 
        """return column names for this model """
 
        return class_mapper(cls).c.keys()
 

	
 
    def get_dict(self):
 
        """
 
        return dict with keys and values corresponding
 
        to this model data """
 

	
 
        d = {}
 
        for k in self._get_keys():
 
            d[k] = getattr(self, k)
 

	
 
        # also use __json__() if present to get additional fields
 
        _json_attr = getattr(self, '__json__', None)
 
        if _json_attr:
 
            # update with attributes from __json__
 
            if callable(_json_attr):
 
                _json_attr = _json_attr()
 
            for k, val in _json_attr.iteritems():
 
                d[k] = val
 
        return d
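        # (subclasses that define __json__, e.g. User further below, thereby get
        # their extra computed fields merged into the get_dict() result as well)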
 

	
 
    def get_appstruct(self):
 
        """return list with keys and values tupples corresponding
 
        to this model data """
 

	
 
        l = []
 
        for k in self._get_keys():
 
            l.append((k, getattr(self, k),))
 
        return l
 

	
 
    def populate_obj(self, populate_dict):
 
        """populate model with data from given populate_dict"""
 

	
 
        for k in self._get_keys():
 
            if k in populate_dict:
 
                setattr(self, k, populate_dict[k])
 

	
 
    @classmethod
 
    def query(cls):
 
        return Session().query(cls)
 

	
 
    @classmethod
 
    def get(cls, id_):
 
        if id_:
 
            return cls.query().get(id_)
 

	
 
    @classmethod
 
    def get_or_404(cls, id_):
 
        try:
 
            id_ = int(id_)
 
        except (TypeError, ValueError):
 
            raise HTTPNotFound
 

	
 
        res = cls.query().get(id_)
 
        if not res:
 
            raise HTTPNotFound
 
        return res
 

	
 
    @classmethod
 
    def getAll(cls):
 
        # deprecated and left for backward compatibility
 
        return cls.get_all()
 

	
 
    @classmethod
 
    def get_all(cls):
 
        return cls.query().all()
 

	
 
    @classmethod
 
    def delete(cls, id_):
 
        obj = cls.query().get(id_)
 
        Session().delete(obj)
 

	
 
    def __repr__(self):
 
        if hasattr(self, '__unicode__'):
 
            # python repr needs to return str
 
            try:
 
                return safe_str(self.__unicode__())
 
            except UnicodeDecodeError:
 
                pass
 
        return '<DB:%s>' % (self.__class__.__name__)
 

	
 

	
 
class Setting(Base, BaseModel):
 
    __tablename__ = DB_PREFIX + 'settings'
 

	
 
    __table_args__ = (
 
        UniqueConstraint('app_settings_name'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 

	
 
    SETTINGS_TYPES = {
 
        'str': safe_str,
 
        'int': safe_int,
 
        'unicode': safe_unicode,
 
        'bool': str2bool,
 
        'list': functools.partial(aslist, sep=',')
 
    }
 
    DEFAULT_UPDATE_URL = ''
 

	
 
    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    app_settings_name = Column("app_settings_name", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    _app_settings_value = Column("app_settings_value", String(4096, convert_unicode=False), nullable=True, unique=None, default=None)
 
    _app_settings_type = Column("app_settings_type", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 

	
 
    def __init__(self, key='', val='', type='unicode'):
 
        self.app_settings_name = key
 
        self.app_settings_value = val
 
        self.app_settings_type = type
 

	
 
    @validates('_app_settings_value')
 
    def validate_settings_value(self, key, val):
 
        assert type(val) == unicode
 
        return val
 

	
 
    @hybrid_property
 
    def app_settings_value(self):
 
        v = self._app_settings_value
 
        _type = self.app_settings_type
 
        converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
 
        return converter(v)
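        # Hedged example: a row stored as (u'true', type 'bool') is returned as
        # the Python value True via str2bool, and (u'a,b,c', type 'list') as
        # ['a', 'b', 'c'] via aslist(..., sep=','); unknown types fall back to
        # the 'unicode' converter.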
 

	
 
    @app_settings_value.setter
 
    def app_settings_value(self, val):
 
        """
 
        Setter that will always make sure we use unicode in app_settings_value
 

	
 
        :param val:
 
        """
 
        self._app_settings_value = safe_unicode(val)
 

	
 
    @hybrid_property
 
    def app_settings_type(self):
 
        return self._app_settings_type
 

	
 
    @app_settings_type.setter
 
    def app_settings_type(self, val):
 
        if val not in self.SETTINGS_TYPES:
 
            raise Exception('type must be one of %s got %s'
 
                            % (self.SETTINGS_TYPES.keys(), val))
 
        self._app_settings_type = val
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s[%s]')>" % (
 
            self.__class__.__name__,
 
            self.app_settings_name, self.app_settings_value, self.app_settings_type
 
        )
 

	
 
    @classmethod
 
    def get_by_name(cls, key):
 
        return cls.query()\
 
            .filter(cls.app_settings_name == key).scalar()
 

	
 
    @classmethod
 
    def get_by_name_or_create(cls, key, val='', type='unicode'):
 
        res = cls.get_by_name(key)
 
        if not res:
 
            res = cls(key, val, type)
 
        return res
 

	
 
    @classmethod
 
    def create_or_update(cls, key, val=Optional(''), type=Optional('unicode')):
 
        """
 
-        Creates or updates Kallithea setting. If updates is triggered it will only
-        update parameters that are explicityl set Optional instance will be skipped
+        Creates or updates Kallithea setting. If updates are triggered, it will only
+        update parameters that are explicitly set. Optional instance will be skipped.
 

	
 
        :param key:
 
        :param val:
 
        :param type:
 
        :return:
 
        """
 
        res = cls.get_by_name(key)
 
        if not res:
 
            val = Optional.extract(val)
 
            type = Optional.extract(type)
 
            res = cls(key, val, type)
 
        else:
 
            res.app_settings_name = key
 
            if not isinstance(val, Optional):
 
                # update if set
 
                res.app_settings_value = val
 
            if not isinstance(type, Optional):
 
                # update if set
 
                res.app_settings_type = type
 
        return res
 

	
 
    @classmethod
 
    def get_app_settings(cls, cache=False):
 

	
 
        ret = cls.query()
 

	
 
        if cache:
 
            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
 

	
 
        if not ret:
 
            raise Exception('Could not get application settings !')
 
        settings = {}
 
        for each in ret:
 
            settings[each.app_settings_name] = \
 
                each.app_settings_value
 

	
 
        return settings
 

	
 
    @classmethod
 
    def get_auth_plugins(cls, cache=False):
 
        auth_plugins = cls.get_by_name("auth_plugins").app_settings_value
 
        return auth_plugins
 

	
 
    @classmethod
 
    def get_auth_settings(cls, cache=False):
 
        ret = cls.query()\
 
                .filter(cls.app_settings_name.startswith('auth_')).all()
 
        fd = {}
 
        for row in ret:
 
            fd[row.app_settings_name] = row.app_settings_value
 
        return fd
 

	
 
    @classmethod
 
    def get_default_repo_settings(cls, cache=False, strip_prefix=False):
 
        ret = cls.query()\
 
                .filter(cls.app_settings_name.startswith('default_')).all()
 
        fd = {}
 
        for row in ret:
 
            key = row.app_settings_name
 
            if strip_prefix:
 
                key = remove_prefix(key, prefix='default_')
 
            fd.update({key: row.app_settings_value})
 

	
 
        return fd
 

	
 
    @classmethod
 
    def get_server_info(cls):
 
        import pkg_resources
 
        import platform
 
        import kallithea
 
        from kallithea.lib.utils import check_git_version
 
        mods = [(p.project_name, p.version) for p in pkg_resources.working_set]
 
        info = {
 
            'modules': sorted(mods, key=lambda k: k[0].lower()),
 
            'py_version': platform.python_version(),
 
            'platform': safe_unicode(platform.platform()),
 
            'kallithea_version': kallithea.__version__,
 
            'git_version': safe_unicode(check_git_version()),
 
            'git_path': kallithea.CONFIG.get('git_path')
 
        }
 
        return info
 

	
 

	
 
class Ui(Base, BaseModel):
 
    __tablename__ = DB_PREFIX + 'ui'
 
    __table_args__ = (
 
        UniqueConstraint('ui_key'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 

	
 
    HOOK_UPDATE = 'changegroup.update'
 
    HOOK_REPO_SIZE = 'changegroup.repo_size'
 
    HOOK_PUSH = 'changegroup.push_logger'
 
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
 
    HOOK_PULL = 'outgoing.pull_logger'
 
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
 

	
 
    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    ui_section = Column("ui_section", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    ui_key = Column("ui_key", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    ui_value = Column("ui_value", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
 

	
 
    # def __init__(self, section='', key='', value=''):
 
    #     self.ui_section = section
 
    #     self.ui_key = key
 
    #     self.ui_value = value
 

	
 
    @classmethod
 
    def get_by_key(cls, key):
 
        return cls.query().filter(cls.ui_key == key).scalar()
 

	
 
    @classmethod
 
    def get_builtin_hooks(cls):
 
        q = cls.query()
 
        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
 
                                     cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
 
                                     cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
 
        return q.all()
 

	
 
    @classmethod
 
    def get_custom_hooks(cls):
 
        q = cls.query()
 
        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
 
                                      cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
 
                                      cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
 
        q = q.filter(cls.ui_section == 'hooks')
 
        return q.all()
 

	
 
    @classmethod
 
    def get_repos_location(cls):
 
        return cls.get_by_key('/').ui_value
 

	
 
    @classmethod
 
    def create_or_update_hook(cls, key, val):
 
        new_ui = cls.get_by_key(key) or cls()
 
        new_ui.ui_section = 'hooks'
 
        new_ui.ui_active = True
 
        new_ui.ui_key = key
 
        new_ui.ui_value = val
 

	
 
        Session().add(new_ui)
 

	
 
    def __repr__(self):
 
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
 
                                    self.ui_key, self.ui_value)
 

	
 

	
 
class User(Base, BaseModel):
 
    __tablename__ = 'users'
 
    __table_args__ = (
 
        UniqueConstraint('username'), UniqueConstraint('email'),
 
        Index('u_username_idx', 'username'),
 
        Index('u_email_idx', 'email'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    DEFAULT_USER = 'default'
 
    DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
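    # Hedged example: DEFAULT_GRAVATAR_URL.format(
    #     md5email=hashlib.md5('user@example.com').hexdigest(), size=32)
    # yields an identicon avatar URL for a hypothetical address at 32px.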
 

	
 
    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    username = Column("username", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    password = Column("password", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
 
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
 
    name = Column("firstname", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    lastname = Column("lastname", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    _email = Column("email", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
 
    extern_type = Column("extern_type", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    extern_name = Column("extern_name", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    api_key = Column("api_key", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    _user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data
 

	
 
    user_log = relationship('UserLog')
 
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
 

	
 
    repositories = relationship('Repository')
 
    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
 
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
 

	
 
    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
 
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
 

	
 
    group_member = relationship('UserGroupMember', cascade='all')
 

	
 
    notifications = relationship('UserNotification', cascade='all')
 
    # notifications assigned to this user
 
    user_created_notifications = relationship('Notification', cascade='all')
 
    # comments created by this user
 
    user_comments = relationship('ChangesetComment', cascade='all')
 
    #extra emails for this user
 
    user_emails = relationship('UserEmailMap', cascade='all')
 
    #extra api keys
 
    user_api_keys = relationship('UserApiKeys', cascade='all')
 

	
 

	
 
    @hybrid_property
 
    def email(self):
 
        return self._email
 

	
 
    @email.setter
 
    def email(self, val):
 
        self._email = val.lower() if val else None
 

	
 
    @property
 
    def firstname(self):
 
        # alias for future
 
        return self.name
 

	
 
    @property
 
    def emails(self):
 
        other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
 
        return [self.email] + [x.email for x in other]
 

	
 
    @property
 
    def api_keys(self):
 
        other = UserApiKeys.query().filter(UserApiKeys.user==self).all()
 
        return [self.api_key] + [x.api_key for x in other]
 

	
 
    @property
 
    def ip_addresses(self):
 
        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
 
        return [x.ip_addr for x in ret]
 

	
 
    @property
 
    def username_and_name(self):
 
        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
 

	
 
    @property
 
    def full_name(self):
 
        return '%s %s' % (self.firstname, self.lastname)
 

	
 
    @property
 
    def full_name_or_username(self):
 
        return ('%s %s' % (self.firstname, self.lastname)
 
                if (self.firstname and self.lastname) else self.username)
 

	
 
    @property
 
    def full_contact(self):
 
        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
 

	
 
    @property
 
    def short_contact(self):
 
        return '%s %s' % (self.firstname, self.lastname)
 

	
 
    @property
 
    def is_admin(self):
 
        return self.admin
 

	
 
    @property
 
    def AuthUser(self):
 
        """
 
        Returns instance of AuthUser for this user
 
        """
 
        from kallithea.lib.auth import AuthUser
 
        return AuthUser(user_id=self.user_id, api_key=self.api_key,
 
                        username=self.username)
 

	
 
    @hybrid_property
 
    def user_data(self):
 
        if not self._user_data:
 
            return {}
 

	
 
        try:
 
            return json.loads(self._user_data)
 
        except TypeError:
 
            return {}
 

	
 
    @user_data.setter
 
    def user_data(self, val):
 
        try:
 
            self._user_data = json.dumps(val)
 
        except Exception:
 
            log.error(traceback.format_exc())
 

	
 
    def __unicode__(self):
 
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
 
                                      self.user_id, self.username)
 

	
 
    @classmethod
 
    def get_by_username(cls, username, case_insensitive=False, cache=False):
 
        if case_insensitive:
 
            q = cls.query().filter(cls.username.ilike(username))
 
        else:
 
            q = cls.query().filter(cls.username == username)
 

	
 
        if cache:
 
            q = q.options(FromCache(
 
                            "sql_cache_short",
 
                            "get_user_%s" % _hash_key(username)
 
                          )
 
            )
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_api_key(cls, api_key, cache=False, fallback=True):
 
        q = cls.query().filter(cls.api_key == api_key)
 

	
 
        if cache:
 
            q = q.options(FromCache("sql_cache_short",
 
                                    "get_api_key_%s" % api_key))
 
        res = q.scalar()
 

	
 
        if fallback and not res:
 
            #fallback to additional keys
 
            _res = UserApiKeys.query()\
 
                .filter(UserApiKeys.api_key == api_key)\
 
                .filter(or_(UserApiKeys.expires == -1,
 
                            UserApiKeys.expires >= time.time()))\
 
                .first()
 
            if _res:
 
                res = _res.user
 
        return res
 

	
 
    @classmethod
 
    def get_by_email(cls, email, case_insensitive=False, cache=False):
 
        if case_insensitive:
 
            q = cls.query().filter(cls.email.ilike(email))
 
        else:
 
            q = cls.query().filter(cls.email == email)
 

	
 
        if cache:
 
            q = q.options(FromCache("sql_cache_short",
 
                                    "get_email_key_%s" % email))
 

	
 
        ret = q.scalar()
 
        if ret is None:
 
            q = UserEmailMap.query()
 
            # try fetching in alternate email map
 
            if case_insensitive:
 
                q = q.filter(UserEmailMap.email.ilike(email))
 
            else:
 
                q = q.filter(UserEmailMap.email == email)
 
            q = q.options(joinedload(UserEmailMap.user))
 
            if cache:
 
                q = q.options(FromCache("sql_cache_short",
 
                                        "get_email_map_key_%s" % email))
 
            ret = getattr(q.scalar(), 'user', None)
 

	
 
        return ret
 

	
 
    @classmethod
 
    def get_from_cs_author(cls, author):
 
        """
 
        Tries to get User objects out of commit author string
 

	
 
        :param author:
 
        """
 
        from kallithea.lib.helpers import email, author_name
 
        # Valid email in the attribute passed, see if they're in the system
 
        _email = email(author)
 
        if _email:
 
            user = cls.get_by_email(_email, case_insensitive=True)
 
            if user:
 
                return user
 
        # Maybe we can match by username?
 
        _author = author_name(author)
 
        user = cls.get_by_username(_author, case_insensitive=True)
 
        if user:
 
            return user
 

	
 
    def update_lastlogin(self):
 
        """Update user lastlogin"""
 
        self.last_login = datetime.datetime.now()
 
        Session().add(self)
 
        log.debug('updated user %s lastlogin' % self.username)
 

	
 
    @classmethod
 
    def get_first_admin(cls):
 
        user = User.query().filter(User.admin == True).first()
 
        if user is None:
 
            raise Exception('Missing administrative account!')
 
        return user
 

	
 
    @classmethod
 
    def get_default_user(cls, cache=False):
 
        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
 
        if user is None:
 
            raise Exception('Missing default account!')
 
        return user
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating user related data for API
 
        """
 
        user = self
 
        data = dict(
 
            user_id=user.user_id,
 
            username=user.username,
 
            firstname=user.name,
 
            lastname=user.lastname,
 
            email=user.email,
 
            emails=user.emails,
 
            api_key=user.api_key,
 
            api_keys=user.api_keys,
 
            active=user.active,
 
            admin=user.admin,
 
            extern_type=user.extern_type,
 
            extern_name=user.extern_name,
 
            last_login=user.last_login,
 
            ip_addresses=user.ip_addresses
 
        )
 
        return data
 

	
 
    def __json__(self):
 
        data = dict(
 
            full_name=self.full_name,
 
            full_name_or_username=self.full_name_or_username,
 
            short_contact=self.short_contact,
 
            full_contact=self.full_contact
 
        )
 
        data.update(self.get_api_data())
 
        return data
 

	
 

	
 
class UserApiKeys(Base, BaseModel):
 
    __tablename__ = 'user_api_keys'
 
    __table_args__ = (
 
        Index('uak_api_key_idx', 'api_key'),
 
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
 
        UniqueConstraint('api_key'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    __mapper_args__ = {}
 

	
 
    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    api_key = Column("api_key", String(255, convert_unicode=False), nullable=False, unique=True)
 
    description = Column('description', UnicodeText(1024))
 
    expires = Column('expires', Float(53), nullable=False)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    user = relationship('User', lazy='joined')
 

	
 
    @property
 
    def expired(self):
 
        if self.expires == -1:
 
            return False
 
        return time.time() > self.expires
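        # (expires == -1 is the sentinel for keys that never expire; the fallback
        # lookup in User.get_by_api_key above honours the same sentinel)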
 

	
 

	
 
class UserEmailMap(Base, BaseModel):
 
    __tablename__ = 'user_email_map'
 
    __table_args__ = (
 
        Index('uem_email_idx', 'email'),
 
        UniqueConstraint('email'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    __mapper_args__ = {}
 

	
 
    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    _email = Column("email", String(255, convert_unicode=False), nullable=True, unique=False, default=None)
 
    user = relationship('User', lazy='joined')
 

	
 
    @validates('_email')
 
    def validate_email(self, key, email):
 
        # check that this email is not the main one
 
        main_email = Session().query(User).filter(User.email == email).scalar()
 
        if main_email is not None:
 
            raise AttributeError('email %s is present is user table' % email)
 
        return email
 

	
 
    @hybrid_property
 
    def email(self):
 
        return self._email
 

	
 
    @email.setter
 
    def email(self, val):
 
        self._email = val.lower() if val else None
 

	
 

	
 
class UserIpMap(Base, BaseModel):
 
    __tablename__ = 'user_ip_map'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'ip_addr'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
 
    )
 
    __mapper_args__ = {}
 

	
 
    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    ip_addr = Column("ip_addr", String(255, convert_unicode=False), nullable=True, unique=False, default=None)
 
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
 
    user = relationship('User', lazy='joined')
 

	
 
    @classmethod
 
    def _get_ip_range(cls, ip_addr):
 
        from kallithea.lib import ipaddr
 
        net = ipaddr.IPNetwork(address=ip_addr)
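        # hedged example: for ip_addr = '192.168.1.0/24' the range below becomes
        # ['192.168.1.0', '192.168.1.255'] (network and broadcast address)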
 
        return [str(net.network), str(net.broadcast)]
 

	
 
    def __json__(self):
 
        return dict(
 
          ip_addr=self.ip_addr,
 
          ip_range=self._get_ip_range(self.ip_addr)
 
        )
 

	
 
    def __unicode__(self):
 
        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
 
                                            self.user_id, self.ip_addr)
 

	
 
class UserLog(Base, BaseModel):
 
    __tablename__ = 'user_logs'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    username = Column("username", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
 
    repository_name = Column("repository_name", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    user_ip = Column("user_ip", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    action = Column("action", UnicodeText(1200000, convert_unicode=False), nullable=True, unique=None, default=None)
 
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
 

	
 
    def __unicode__(self):
 
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
 
                                      self.repository_name,
 
                                      self.action)
 

	
 
    @property
 
    def action_as_day(self):
 
        return datetime.date(*self.action_date.timetuple()[:3])
 

	
 
    user = relationship('User')
 
    repository = relationship('Repository', cascade='')
 

	
 

	
 
class UserGroup(Base, BaseModel):
 
    __tablename__ = 'users_groups'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_name = Column("users_group_name", String(255, convert_unicode=False), nullable=False, unique=True, default=None)
 
    user_group_description = Column("user_group_description", String(10000, convert_unicode=False), nullable=True, unique=None, default=None)
 
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
 
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data
 

	
 
    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
 
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
 
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
 
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
 
    user_user_group_to_perm = relationship('UserUserGroupToPerm ', cascade='all')
 
    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
 

	
 
    user = relationship('User')
 

	
 
    @hybrid_property
 
    def group_data(self):
 
        if not self._group_data:
 
            return {}
 

	
 
        try:
 
            return json.loads(self._group_data)
 
        except TypeError:
 
            return {}
 

	
 
    @group_data.setter
 
    def group_data(self, val):
 
        try:
 
            self._group_data = json.dumps(val)
 
        except Exception:
 
            log.error(traceback.format_exc())
 

	
 
    def __unicode__(self):
 
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
 
                                      self.users_group_id,
 
                                      self.users_group_name)
 

	
 
    @classmethod
 
    def get_by_group_name(cls, group_name, cache=False,
 
                          case_insensitive=False):
 
        if case_insensitive:
 
            q = cls.query().filter(cls.users_group_name.ilike(group_name))
 
        else:
 
            q = cls.query().filter(cls.users_group_name == group_name)
 
        if cache:
 
            q = q.options(FromCache(
 
                            "sql_cache_short",
 
                            "get_user_%s" % _hash_key(group_name)
 
                          )
 
            )
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get(cls, user_group_id, cache=False):
 
        user_group = cls.query()
 
        if cache:
 
            user_group = user_group.options(FromCache("sql_cache_short",
 
                                    "get_users_group_%s" % user_group_id))
 
        return user_group.get(user_group_id)
 

	
 
    def get_api_data(self, with_members=True):
 
        user_group = self
 

	
 
        data = dict(
 
            users_group_id=user_group.users_group_id,
 
            group_name=user_group.users_group_name,
 
            group_description=user_group.user_group_description,
 
            active=user_group.users_group_active,
 
            owner=user_group.user.username,
 
        )
 
        if with_members:
 
            members = []
 
            for user in user_group.members:
 
                user = user.user
 
                members.append(user.get_api_data())
 
            data['members'] = members
 

	
 
        return data
 

	
 

	
 
class UserGroupMember(Base, BaseModel):
 
    __tablename__ = 'users_groups_members'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 

	
 
    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User', lazy='joined')
 
    users_group = relationship('UserGroup')
 

	
 
    def __init__(self, gr_id='', u_id=''):
 
        self.users_group_id = gr_id
 
        self.user_id = u_id
 

	
 

	
 
class RepositoryField(Base, BaseModel):
 
    __tablename__ = 'repositories_fields'
 
    __table_args__ = (
 
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields
 

	
 
    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
 
    field_key = Column("field_key", String(250, convert_unicode=False))
 
    field_label = Column("field_label", String(1024, convert_unicode=False), nullable=False)
 
    field_value = Column("field_value", String(10000, convert_unicode=False), nullable=False)
 
    field_desc = Column("field_desc", String(1024, convert_unicode=False), nullable=False)
 
    field_type = Column("field_type", String(256), nullable=False, unique=None)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    repository = relationship('Repository')
 

	
 
    @property
 
    def field_key_prefixed(self):
 
        return 'ex_%s' % self.field_key
 

	
 
    @classmethod
 
    def un_prefix_key(cls, key):
 
        if key.startswith(cls.PREFIX):
 
            return key[len(cls.PREFIX):]
 
        return key
 

	
 
    @classmethod
 
    def get_by_key_name(cls, key, repo):
 
        row = cls.query()\
 
                .filter(cls.repository == repo)\
 
                .filter(cls.field_key == key).scalar()
 
        return row
 

	
 

	
 
class Repository(Base, BaseModel):
 
    __tablename__ = 'repositories'
 
    __table_args__ = (
 
        UniqueConstraint('repo_name'),
 
        Index('r_repo_name_idx', 'repo_name'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
 
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
 

	
 
    STATE_CREATED = 'repo_state_created'
 
    STATE_PENDING = 'repo_state_pending'
 
    STATE_ERROR = 'repo_state_error'
 

	
 
    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    repo_name = Column("repo_name", String(255, convert_unicode=False), nullable=False, unique=True, default=None)
 
    repo_state = Column("repo_state", String(255), nullable=True)
 

	
 
    clone_uri = Column("clone_uri", String(255, convert_unicode=False), nullable=True, unique=False, default=None)
 
    repo_type = Column("repo_type", String(255, convert_unicode=False), nullable=False, unique=False, default=None)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
 
    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
 
    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
 
    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
 
    description = Column("description", String(10000, convert_unicode=False), nullable=True, unique=None, default=None)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
 
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
 
    _landing_revision = Column("landing_revision", String(255, convert_unicode=False), nullable=False, unique=False, default=None)
 
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
 
    _locked = Column("locked", String(255, convert_unicode=False), nullable=True, unique=False, default=None)
 
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) #JSON data
 

	
 
    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
 
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
 

	
 
    user = relationship('User')
 
    fork = relationship('Repository', remote_side=repo_id)
 
    group = relationship('RepoGroup')
 
    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
 
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
 
    stats = relationship('Statistics', cascade='all', uselist=False)
 

	
 
    followers = relationship('UserFollowing',
 
                             primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
 
                             cascade='all')
 
    extra_fields = relationship('RepositoryField',
 
                                cascade="all, delete, delete-orphan")
 

	
 
    logs = relationship('UserLog')
 
    comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan")
 

	
 
    pull_requests_org = relationship('PullRequest',
 
                    primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
 
                    cascade="all, delete, delete-orphan")
 

	
 
    pull_requests_other = relationship('PullRequest',
 
                    primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
 
                    cascade="all, delete, delete-orphan")
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
 
                                   safe_unicode(self.repo_name))
 

	
 
    @hybrid_property
 
    def landing_rev(self):
 
        # always should return [rev_type, rev]
 
        if self._landing_revision:
 
            _rev_info = self._landing_revision.split(':')
 
            if len(_rev_info) < 2:
 
                _rev_info.insert(0, 'rev')
 
            return [_rev_info[0], _rev_info[1]]
 
        return [None, None]
 

	
 
    @landing_rev.setter
 
    def landing_rev(self, val):
 
        if ':' not in val:
 
            raise ValueError('value must be delimited with `:` and consist '
 
                             'of <rev_type>:<rev>, got %s instead' % val)
 
        self._landing_revision = val
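        # e.g. repo.landing_rev = 'branch:default' stores 'branch:default' and is
        # read back as ['branch', 'default']; assigning a bare 'tip' (no ':')
        # raises ValueError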
 

	
 
    @hybrid_property
 
    def locked(self):
 
        # always should return [user_id, timelocked]
 
        if self._locked:
 
            _lock_info = self._locked.split(':')
 
            return int(_lock_info[0]), _lock_info[1]
 
        return [None, None]
 

	
 
    @locked.setter
 
    def locked(self, val):
 
        if val and isinstance(val, (list, tuple)):
 
            self._locked = ':'.join(map(str, val))
 
        else:
 
            self._locked = None
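        # e.g. repo.locked = [2, 1411117979] stores '2:1411117979' and is read
        # back as (2, '1411117979'); assigning a falsy value clears the lock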
 

	
 
    @hybrid_property
 
    def changeset_cache(self):
 
        from kallithea.lib.vcs.backends.base import EmptyChangeset
 
        dummy = EmptyChangeset().__json__()
 
        if not self._changeset_cache:
 
            return dummy
 
        try:
 
            return json.loads(self._changeset_cache)
 
        except TypeError:
 
            return dummy
 

	
 
    @changeset_cache.setter
 
    def changeset_cache(self, val):
 
        try:
 
            self._changeset_cache = json.dumps(val)
 
        except Exception:
 
            log.error(traceback.format_exc())
 

	
 
    @classmethod
 
    def url_sep(cls):
 
        return URL_SEP
 

	
 
    @classmethod
 
    def normalize_repo_name(cls, repo_name):
 
        """
 
        Normalizes an OS-specific repo_name to the format stored internally in the
        database, using URL_SEP
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        return cls.url_sep().join(repo_name.split(os.sep))
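        # e.g. on Windows 'group\\repo' becomes 'group/repo'; on POSIX, where
        # os.sep is already '/', names pass through unchanged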
 

	
 
    @classmethod
 
    def get_by_repo_name(cls, repo_name):
 
        q = Session().query(cls).filter(cls.repo_name == repo_name)
 
        q = q.options(joinedload(Repository.fork))\
 
                .options(joinedload(Repository.user))\
 
                .options(joinedload(Repository.group))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_full_path(cls, repo_full_path):
 
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
 
        repo_name = cls.normalize_repo_name(repo_name)
 
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
 

	
 
    @classmethod
 
    def get_repo_forks(cls, repo_id):
 
        return cls.query().filter(Repository.fork_id == repo_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns the base path where all repos are stored
 

	
 
        :param cls:
 
        """
 
        q = Session().query(Ui)\
 
            .filter(Ui.ui_key == cls.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def forks(self):
 
        """
 
        Return forks of this repo
 
        """
 
        return Repository.get_repo_forks(self.repo_id)
 

	
 
    @property
 
    def parent(self):
 
        """
 
        Returns fork parent
 
        """
 
        return self.fork
 

	
 
    @property
 
    def just_name(self):
 
        return self.repo_name.split(Repository.url_sep())[-1]
 

	
 
    @property
 
    def groups_with_parents(self):
 
        groups = []
 
        if self.group is None:
 
            return groups
 

	
 
        cur_gr = self.group
 
        groups.insert(0, cur_gr)
 
        while 1:
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            groups.insert(0, gr)
 

	
 
        return groups
 

	
 
    @property
 
    def groups_and_repo(self):
 
        return self.groups_with_parents, self.just_name, self.repo_name
 

	
 
    @LazyProperty
 
    def repo_path(self):
 
        """
 
        Returns the full base path for this repository, i.e. where it actually
 
        exists on the filesystem
 
        """
 
        q = Session().query(Ui).filter(Ui.ui_key ==
 
                                              Repository.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def repo_full_path(self):
 
        p = [self.repo_path]
 
        # we need to split the name by / since this is how we store the
 
        # names in the database, but that eventually needs to be converted
 
        # into a valid system path
 
        p += self.repo_name.split(Repository.url_sep())
 
        return os.path.join(*map(safe_unicode, p))
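    # Example with hypothetical paths: repo_path '/srv/repos' and repo_name
    # 'group/repo' give a repo_full_path of '/srv/repos/group/repo' (joined
    # with the platform's os.path.join).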
 

	
 
    @property
 
    def cache_keys(self):
 
        """
 
        Returns associated cache keys for that repo
 
        """
 
        return CacheInvalidation.query()\
 
            .filter(CacheInvalidation.cache_args == self.repo_name)\
 
            .order_by(CacheInvalidation.cache_key)\
 
            .all()
 

	
 
    def get_new_name(self, repo_name):
 
        """
 
        returns the new full repository name based on the assigned group and the new name
 

	
 
        :param repo_name:
 
        """
 
        path_prefix = self.group.full_path_splitted if self.group else []
 
        return Repository.url_sep().join(path_prefix + [repo_name])
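    # Example with hypothetical names: for a repo in group 'web/frontend',
    # get_new_name('site') returns 'web/frontend/site'; for an ungrouped repo
    # it is simply 'site'.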
 

	
 
    @property
 
    def _ui(self):
 
        """
 
        Creates a db-based ui object for this repository
 
        """
 
        from kallithea.lib.utils import make_ui
 
        return make_ui('db', clear_session=False)
 

	
 
    @classmethod
 
    def is_valid(cls, repo_name):
 
        """
 
        returns True if the given repo name is a valid filesystem repository
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        from kallithea.lib.utils import is_valid_repo
 

	
 
        return is_valid_repo(repo_name, cls.base_path())
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating repo api data
 

	
 
        """
 
        repo = self
 
        data = dict(
 
            repo_id=repo.repo_id,
 
            repo_name=repo.repo_name,
 
            repo_type=repo.repo_type,
 
            clone_uri=repo.clone_uri,
 
            private=repo.private,
 
            created_on=repo.created_on,
 
            description=repo.description,
 
            landing_rev=repo.landing_rev,
 
            owner=repo.user.username,
 
            fork_of=repo.fork.repo_name if repo.fork else None,
 
            enable_statistics=repo.enable_statistics,
 
            enable_locking=repo.enable_locking,
 
            enable_downloads=repo.enable_downloads,
 
            last_changeset=repo.changeset_cache,
 
            locked_by=User.get(self.locked[0]).get_api_data() \
 
                if self.locked[0] else None,
 
            locked_date=time_to_datetime(self.locked[1]) \
 
                if self.locked[1] else None
 
        )
 
        rc_config = Setting.get_app_settings()
 
        repository_fields = str2bool(rc_config.get('repository_fields'))
 
        if repository_fields:
 
            for f in self.extra_fields:
 
                data[f.field_key_prefixed] = f.field_value
 

	
 
        return data
 

	
 
    @classmethod
 
    def lock(cls, repo, user_id, lock_time=None):
 
        if not lock_time:
 
            lock_time = time.time()
 
        repo.locked = [user_id, lock_time]
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def unlock(cls, repo):
 
        repo.locked = None
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def getlock(cls, repo):
 
        return repo.locked
 

	
 
    @property
 
    def last_db_change(self):
 
        return self.updated_on
 

	
 
    @property
 
    def clone_uri_hidden(self):
 
        clone_uri = self.clone_uri
 
        if clone_uri:
 
            import urlobject
 
            url_obj = urlobject.URLObject(self.clone_uri)
 
            if url_obj.password:
 
                clone_uri = url_obj.with_password('*****')
 
        return clone_uri
 

	
 
    def clone_url(self, **override):
 
        import kallithea.lib.helpers as h
 
        qualified_home_url = h.canonical_url('home')
 

	
 
        uri_tmpl = None
 
        if 'with_id' in override:
 
            uri_tmpl = self.DEFAULT_CLONE_URI_ID
 
            del override['with_id']
 

	
 
        if 'uri_tmpl' in override:
 
            uri_tmpl = override['uri_tmpl']
 
            del override['uri_tmpl']
 

	
 
        # we didn't override our tmpl via **override
 
        if not uri_tmpl:
 
            uri_tmpl = self.DEFAULT_CLONE_URI
 
            try:
 
                from pylons import tmpl_context as c
 
                uri_tmpl = c.clone_uri_tmpl
 
            except Exception:
 
                # in any case, if we call this outside of a request context,
 
                # i.e. without tmpl_context set up, keep the default template
 
                pass
 

	
 
        return get_clone_url(uri_tmpl=uri_tmpl,
 
                             qualifed_home_url=qualified_home_url,
 
                             repo_name=self.repo_name,
 
                             repo_id=self.repo_id, **override)
 

	
 
    def set_state(self, state):
 
        self.repo_state = state
 
        Session().add(self)
 
    #==========================================================================
 
    # SCM PROPERTIES
 
    #==========================================================================
 

	
 
    def get_changeset(self, rev=None):
 
        return get_changeset_safe(self.scm_instance, rev)
 

	
 
    def get_landing_changeset(self):
 
        """
 
        Returns the landing changeset, or the tip if that doesn't exist
 
        """
 
        _rev_type, _rev = self.landing_rev
 
        cs = self.get_changeset(_rev)
 
        if isinstance(cs, EmptyChangeset):
 
            return self.get_changeset()
 
        return cs
 

	
 
    def update_changeset_cache(self, cs_cache=None):
 
        """
 
        Update cache of last changeset for repository, keys should be::
 

	
 
            short_id
 
            raw_id
 
            revision
 
            message
 
            date
 
            author
 

	
 
        :param cs_cache:
 
        """
 
        from kallithea.lib.vcs.backends.base import BaseChangeset
 
        if cs_cache is None:
 
            cs_cache = EmptyChangeset()
 
            # use no-cache version here
 
            scm_repo = self.scm_instance_no_cache()
 
            if scm_repo:
 
                cs_cache = scm_repo.get_changeset()
 

	
 
        if isinstance(cs_cache, BaseChangeset):
 
            cs_cache = cs_cache.__json__()
 

	
 
        if (not self.changeset_cache or cs_cache['raw_id'] != self.changeset_cache['raw_id']):
 
            _default = datetime.datetime.fromtimestamp(0)
 
            last_change = cs_cache.get('date') or _default
 
            log.debug('updated repo %s with new cs cache %s'
 
                      % (self.repo_name, cs_cache))
 
            self.updated_on = last_change
 
            self.changeset_cache = cs_cache
 
            Session().add(self)
 
            Session().commit()
 
        else:
 
            log.debug('changeset_cache for %s already up to date with %s'
 
                      % (self.repo_name, cs_cache['raw_id']))
 

	
 
    @property
 
    def tip(self):
 
        return self.get_changeset('tip')
 

	
 
    @property
 
    def author(self):
 
        return self.tip.author
 

	
 
    @property
 
    def last_change(self):
 
        return self.scm_instance.last_change
 

	
 
    def get_comments(self, revisions=None):
 
        """
 
        Returns comments for this repository grouped by revisions
 

	
 
        :param revisions: filter query by revisions only
 
        """
 
        cmts = ChangesetComment.query()\
 
            .filter(ChangesetComment.repo == self)
 
        if revisions:
 
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
 
        grouped = collections.defaultdict(list)
 
        for cmt in cmts.all():
 
            grouped[cmt.revision].append(cmt)
 
        return grouped
 

	
 
    def statuses(self, revisions):
 
        """
 
        Returns statuses for this repository
 

	
 
        :param revisions: list of revisions to get statuses for
 
        """
 
        if not revisions:
 
            return {}
 

	
 
        statuses = ChangesetStatus.query()\
 
            .filter(ChangesetStatus.repo == self)\
 
            .filter(ChangesetStatus.version == 0)\
 
            .filter(ChangesetStatus.revision.in_(revisions))
 
        grouped = {}
 

	
 
        stat = ChangesetStatus.DEFAULT
 
        status_lbl = ChangesetStatus.get_status_lbl(stat)
 
        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
 
            for rev in pr.revisions:
 
                pr_id = pr.pull_request_id
 
                pr_repo = pr.other_repo.repo_name
 
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
 

	
 
        for stat in statuses.all():
 
            pr_id = pr_repo = None
 
            if stat.pull_request:
 
                pr_id = stat.pull_request.pull_request_id
 
                pr_repo = stat.pull_request.other_repo.repo_name
 
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
 
                                      pr_id, pr_repo]
 
        return grouped
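    # Illustrative (assumed) shape of the returned dict, keyed by revision:
    #   {rev: [status, status_label, pull_request_id, other_repo_name], ...}
    # Revisions seen only through open pull requests get the default status.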
 

	
 
    def _repo_size(self):
 
        from kallithea.lib import helpers as h
 
        log.debug('calculating repository size...')
 
        return h.format_byte_size(self.scm_instance.size)
 

	
 
    #==========================================================================
 
    # SCM CACHE INSTANCE
 
    #==========================================================================
 

	
 
    def set_invalidate(self):
 
        """
 
        Mark caches of this repo as invalid.
 
        """
 
        CacheInvalidation.set_invalidate(self.repo_name)
 

	
 
    def scm_instance_no_cache(self):
 
        return self.__get_instance()
 

	
 
    @property
 
    def scm_instance(self):
 
        import kallithea
 
        full_cache = str2bool(kallithea.CONFIG.get('vcs_full_cache'))
 
        if full_cache:
 
            return self.scm_instance_cached()
 
        return self.__get_instance()
 

	
 
    def scm_instance_cached(self, valid_cache_keys=None):
 
        @cache_region('long_term')
 
        def _c(repo_name):
 
            return self.__get_instance()
 
        rn = self.repo_name
 

	
 
        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
 
        if not valid:
 
            log.debug('Cache for %s invalidated, getting new object' % (rn))
 
            region_invalidate(_c, None, rn)
 
        else:
 
            log.debug('Getting scm_instance of %s from cache' % (rn))
 
        return _c(rn)
 

	
 
    def __get_instance(self):
 
        repo_full_path = self.repo_full_path
 
        try:
 
            alias = get_scm(repo_full_path)[0]
 
            log.debug('Creating instance of %s repository from %s'
 
                      % (alias, repo_full_path))
 
            backend = get_backend(alias)
 
        except VCSError:
 
            log.error(traceback.format_exc())
 
            log.error('Perhaps this repository is in the database but not on the '
 
                      'filesystem; run "rescan repositories" with the '
 
                      '"destroy old data" option from the admin panel')
 
            return
 

	
 
        if alias == 'hg':
 

	
 
            repo = backend(safe_str(repo_full_path), create=False,
 
                           baseui=self._ui)
 
            # skip hidden web repository
 
            if repo._get_hidden():
 
                return
 
        else:
 
            repo = backend(repo_full_path, create=False)
 

	
 
        return repo
 

	
 
    def __json__(self):
 
        return dict(landing_rev = self.landing_rev)
 

	
 
class RepoGroup(Base, BaseModel):
 
    __tablename__ = 'groups'
 
    __table_args__ = (
 
        UniqueConstraint('group_name', 'group_parent_id'),
 
        CheckConstraint('group_id != group_parent_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    __mapper_args__ = {'order_by': 'group_name'}
 

	
 
    SEP = ' &raquo; '
 

	
 
    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    group_name = Column("group_name", String(255, convert_unicode=False), nullable=False, unique=True, default=None)
 
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
 
    group_description = Column("group_description", String(10000, convert_unicode=False), nullable=True, unique=None, default=None)
 
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
 
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
 
    parent_group = relationship('RepoGroup', remote_side=group_id)
 
    user = relationship('User')
 

	
 
    def __init__(self, group_name='', parent_group=None):
 
        self.group_name = group_name
 
        self.parent_group = parent_group
 

	
 
    def __unicode__(self):
 
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
 
                                      self.group_name)
 

	
 
    @classmethod
 
    def _generate_choice(cls, repo_group):
 
        from webhelpers.html import literal as _literal
 
        _name = lambda k: _literal(cls.SEP.join(k))
 
        return repo_group.group_id, _name(repo_group.full_path_splitted)
 

	
 
    @classmethod
 
    def groups_choices(cls, groups=None, show_empty_group=True):
 
        if not groups:
 
            groups = cls.query().all()
 

	
 
        repo_groups = []
 
        if show_empty_group:
 
            repo_groups = [('-1', u'-- %s --' % _('top level'))]
 

	
 
        repo_groups.extend([cls._generate_choice(x) for x in groups])
 

	
 
        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(cls.SEP)[0])
 
        return repo_groups
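    # Example with hypothetical groups 'projects' and 'projects/web':
    #   RepoGroup.groups_choices() ->
    #       [('-1', u'-- top level --'), (3, u'projects'), (7, u'projects &raquo; web')]
    # i.e. (group_id, human-readable path) pairs ready for a select widget.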
 

	
 
    @classmethod
 
    def url_sep(cls):
 
        return URL_SEP
 

	
 
    @classmethod
 
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
 
        if case_insensitive:
 
            gr = cls.query()\
 
                .filter(cls.group_name.ilike(group_name))
 
        else:
 
            gr = cls.query()\
 
                .filter(cls.group_name == group_name)
 
        if cache:
 
            gr = gr.options(FromCache(
 
                            "sql_cache_short",
 
                            "get_group_%s" % _hash_key(group_name)
 
                            )
 
            )
 
        return gr.scalar()
 

	
 
    @property
 
    def parents(self):
 
        parents_recursion_limit = 10
 
        groups = []
 
        if self.parent_group is None:
 
            return groups
 
        cur_gr = self.parent_group
 
        groups.insert(0, cur_gr)
 
        cnt = 0
 
        while 1:
 
            cnt += 1
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            if cnt == parents_recursion_limit:
 
                # this will prevent accidental infinite loops
 
                log.error(('more than %s parents found for group %s, stopping '
 
                           'recursive parent fetching' % (parents_recursion_limit, self)))
 
                break
 

	
 
            groups.insert(0, gr)
 
        return groups
 

	
 
    @property
 
    def children(self):
 
        return RepoGroup.query().filter(RepoGroup.parent_group == self)
 

	
 
    @property
 
    def name(self):
 
        return self.group_name.split(RepoGroup.url_sep())[-1]
 

	
 
    @property
 
    def full_path(self):
 
        return self.group_name
 

	
 
    @property
 
    def full_path_splitted(self):
 
        return self.group_name.split(RepoGroup.url_sep())
 

	
 
    @property
 
    def repositories(self):
 
        return Repository.query()\
 
                .filter(Repository.group == self)\
 
                .order_by(Repository.repo_name)
 

	
 
    @property
 
    def repositories_recursive_count(self):
 
        cnt = self.repositories.count()
 

	
 
        def children_count(group):
 
            cnt = 0
 
            for child in group.children:
 
                cnt += child.repositories.count()
 
                cnt += children_count(child)
 
            return cnt
 

	
 
        return cnt + children_count(self)
 

	
 
    def _recursive_objects(self, include_repos=True):
 
        all_ = []
 

	
 
        def _get_members(root_gr):
 
            if include_repos:
 
                for r in root_gr.repositories:
 
                    all_.append(r)
 
            childs = root_gr.children.all()
 
            if childs:
 
                for gr in childs:
 
                    all_.append(gr)
 
                    _get_members(gr)
 

	
 
        _get_members(self)
 
        return [self] + all_
 

	
 
    def recursive_groups_and_repos(self):
 
        """
 
        Recursively returns all groups, with the repositories in those groups
 
        """
 
        return self._recursive_objects()
 

	
 
    def recursive_groups(self):
 
        """
 
        Returns all child groups of this group, including children of children
 
        """
 
        return self._recursive_objects(include_repos=False)
 

	
 
    def get_new_name(self, group_name):
 
        """
 
        returns the new full group name based on the parent group and the new name
 

	
 
        :param group_name:
 
        """
 
        path_prefix = (self.parent_group.full_path_splitted if
 
                       self.parent_group else [])
 
        return RepoGroup.url_sep().join(path_prefix + [group_name])
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating api data
 

	
 
        """
 
        group = self
 
        data = dict(
 
            group_id=group.group_id,
 
            group_name=group.group_name,
 
            group_description=group.group_description,
 
            parent_group=group.parent_group.group_name if group.parent_group else None,
 
            repositories=[x.repo_name for x in group.repositories],
 
            owner=group.user.username
 
        )
 
        return data
 

	
 

	
 
class Permission(Base, BaseModel):
 
    __tablename__ = 'permissions'
 
    __table_args__ = (
 
        Index('p_perm_name_idx', 'permission_name'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    PERMS = [
 
        ('hg.admin', _('Kallithea Administrator')),
 

	
 
        ('repository.none', _('Repository no access')),
 
        ('repository.read', _('Repository read access')),
 
        ('repository.write', _('Repository write access')),
 
        ('repository.admin', _('Repository admin access')),
 

	
 
        ('group.none', _('Repository group no access')),
 
        ('group.read', _('Repository group read access')),
 
        ('group.write', _('Repository group write access')),
 
        ('group.admin', _('Repository group admin access')),
 

	
 
        ('usergroup.none', _('User group no access')),
 
        ('usergroup.read', _('User group read access')),
 
        ('usergroup.write', _('User group write access')),
 
        ('usergroup.admin', _('User group admin access')),
 

	
 
        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
 
        ('hg.repogroup.create.true', _('Repository Group creation enabled')),
 

	
 
        ('hg.usergroup.create.false', _('User Group creation disabled')),
 
        ('hg.usergroup.create.true', _('User Group creation enabled')),
 

	
 
        ('hg.create.none', _('Repository creation disabled')),
 
        ('hg.create.repository', _('Repository creation enabled')),
 
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
 
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
 

	
 
        ('hg.fork.none', _('Repository forking disabled')),
 
        ('hg.fork.repository', _('Repository forking enabled')),
 

	
 
        ('hg.register.none', _('Registration disabled')),
 
        ('hg.register.manual_activate', _('User Registration with manual account activation')),
 
        ('hg.register.auto_activate', _('User Registration with automatic account activation')),
 

	
 
        ('hg.extern_activate.manual', _('Manual activation of external account')),
 
        ('hg.extern_activate.auto', _('Automatic activation of external account')),
 

	
 
    ]
 

	
 
    #definition of system default permissions for DEFAULT user
 
    DEFAULT_USER_PERMISSIONS = [
 
        'repository.read',
 
        'group.read',
 
        'usergroup.read',
 
        'hg.create.repository',
 
        'hg.create.write_on_repogroup.true',
 
        'hg.fork.repository',
 
        'hg.register.manual_activate',
 
        'hg.extern_activate.auto',
 
    ]
 

	
 
    # defines which permissions are more important higher the more important
 
    # Weight defines which permissions are more important.
 
    # The higher number the more important.
 
    PERM_WEIGHTS = {
 
        'repository.none': 0,
 
        'repository.read': 1,
 
        'repository.write': 3,
 
        'repository.admin': 4,
 

	
 
        'group.none': 0,
 
        'group.read': 1,
 
        'group.write': 3,
 
        'group.admin': 4,
 

	
 
        'usergroup.none': 0,
 
        'usergroup.read': 1,
 
        'usergroup.write': 3,
 
        'usergroup.admin': 4,
 
        'hg.repogroup.create.false': 0,
 
        'hg.repogroup.create.true': 1,
 

	
 
        'hg.usergroup.create.false': 0,
 
        'hg.usergroup.create.true': 1,
 

	
 
        'hg.fork.none': 0,
 
        'hg.fork.repository': 1,
 
        'hg.create.none': 0,
 
        'hg.create.repository': 1
 
    }
 

	
 
    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    permission_name = Column("permission_name", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 
    permission_longname = Column("permission_longname", String(255, convert_unicode=False), nullable=True, unique=None, default=None)
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (
 
            self.__class__.__name__, self.permission_id, self.permission_name
 
        )
 

	
 
    @classmethod
 
    def get_by_key(cls, key):
 
        return cls.query().filter(cls.permission_name == key).scalar()
 

	
 
    @classmethod
 
    def get_default_perms(cls, default_user_id):
 
        q = Session().query(UserRepoToPerm, Repository, cls)\
 
         .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
 
         .join((cls, UserRepoToPerm.permission_id == cls.permission_id))\
 
         .filter(UserRepoToPerm.user_id == default_user_id)
 

	
 
        return q.all()
 

	
 
    @classmethod
 
    def get_default_group_perms(cls, default_user_id):
 
        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls)\
 
         .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
 
         .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id))\
 
         .filter(UserRepoGroupToPerm.user_id == default_user_id)
 

	
 
        return q.all()
 

	
 
    @classmethod
 
    def get_default_user_group_perms(cls, default_user_id):
 
        q = Session().query(UserUserGroupToPerm, UserGroup, cls)\
 
         .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
 
         .join((cls, UserUserGroupToPerm.permission_id == cls.permission_id))\
 
         .filter(UserUserGroupToPerm.user_id == default_user_id)
 

	
kallithea/model/forms.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
these are the form validation classes;
 
see http://formencode.org/module-formencode.validators.html
 
for a list of all available validators
 

	
 
we can create our own validators
 

	
 
The table below outlines the options which can be used in a schema in addition to the validators themselves
 
pre_validators          []     These validators will be applied before the schema
 
chained_validators      []     These validators will be applied after the schema
 
allow_extra_fields      False     If True, then it is not an error when keys that aren't associated with a validator are present
 
filter_extra_fields     False     If True, then keys that aren't associated with a validator are removed
 
if_key_missing          NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default value has been specified and therefore missing keys shouldn't take a default value.
 
ignore_key_missing      False     If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
 

	
 

	
 
<name> = formencode.validators.<name of validator>
 
<name> must equal form name
 
list=[1,2,3,4,5]
 
for SELECT use formencode.All(OneOf(list), Int())
 

	
 
"""
 
import logging
 

	
 
import formencode
 
from formencode import All
 

	
 
from pylons.i18n.translation import _
 

	
 
from kallithea import BACKENDS
 
from kallithea.model import validators as v
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class LoginForm(formencode.Schema):
 
    allow_extra_fields = True
 
    filter_extra_fields = True
 
    username = v.UnicodeString(
 
        strip=True,
 
        min=1,
 
        not_empty=True,
 
        messages={
 
           'empty': _(u'Please enter a login'),
 
           'tooShort': _(u'Enter a value %(min)i characters long or more')}
 
    )
 

	
 
    password = v.UnicodeString(
 
        strip=False,
 
        min=3,
 
        not_empty=True,
 
        messages={
 
            'empty': _(u'Please enter a password'),
 
            'tooShort': _(u'Enter %(min)i characters or more')}
 
    )
 

	
 
    remember = v.StringBoolean(if_missing=False)
 

	
 
    chained_validators = [v.ValidAuth()]
 

	
 

	
 
def PasswordChangeForm(username):
 
    class _PasswordChangeForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = True
 

	
 
        current_password = v.ValidOldPassword(username)(not_empty=True)
 
        new_password = All(v.ValidPassword(), v.UnicodeString(strip=False, min=6))
 
        new_password_confirmation = All(v.ValidPassword(), v.UnicodeString(strip=False, min=6))
 

	
 
        chained_validators = [v.ValidPasswordsMatch('new_password',
 
                                                    'new_password_confirmation')]
 
    return _PasswordChangeForm
 

	
 

	
 
def UserForm(edit=False, old_data={}):
 
    class _UserForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = True
 
        username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
 
                       v.ValidUsername(edit, old_data))
 
        if edit:
 
            new_password = All(
 
                v.ValidPassword(),
 
                v.UnicodeString(strip=False, min=6, not_empty=False)
 
            )
 
            password_confirmation = All(
 
                v.ValidPassword(),
 
                v.UnicodeString(strip=False, min=6, not_empty=False),
 
            )
 
            admin = v.StringBoolean(if_missing=False)
 
        else:
 
            password = All(
 
                v.ValidPassword(),
 
                v.UnicodeString(strip=False, min=6, not_empty=True)
 
            )
 
            password_confirmation = All(
 
                v.ValidPassword(),
 
                v.UnicodeString(strip=False, min=6, not_empty=False)
 
            )
 

	
 
        active = v.StringBoolean(if_missing=False)
 
        firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
 
        lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
 
        email = All(v.Email(not_empty=True), v.UniqSystemEmail(old_data))
 
        extern_name = v.UnicodeString(strip=True)
 
        extern_type = v.UnicodeString(strip=True)
 
        chained_validators = [v.ValidPasswordsMatch()]
 
    return _UserForm
 

	
 

	
 
def UserGroupForm(edit=False, old_data={}, available_members=[]):
 
    class _UserGroupForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = True
 

	
 
        users_group_name = All(
 
            v.UnicodeString(strip=True, min=1, not_empty=True),
 
            v.ValidUserGroup(edit, old_data)
 
        )
 
        user_group_description = v.UnicodeString(strip=True, min=1,
 
                                                 not_empty=False)
 

	
 
        users_group_active = v.StringBoolean(if_missing=False)
 

	
 
        if edit:
 
            users_group_members = v.OneOf(
 
                available_members, hideList=False, testValueList=True,
 
                if_missing=None, not_empty=False
 
            )
 

	
 
    return _UserGroupForm
 

	
 

	
 
def RepoGroupForm(edit=False, old_data={}, available_groups=[],
 
                   can_create_in_root=False):
 
    class _RepoGroupForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = False
 

	
 
        group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
 
                         v.SlugifyName(),
 
                         v.ValidRegex(msg=_('Name must not contain only digits'))(r'(?!^\d+$)^.+$'))
 
        group_description = v.UnicodeString(strip=True, min=1,
 
                                            not_empty=False)
 
        group_copy_permissions = v.StringBoolean(if_missing=False)
 

	
 
        if edit:
 
            #FIXME: do a special check that we cannot move a group to one of
 
            #it's children
 
            #its children
 
            pass
 
        group_parent_id = All(v.CanCreateGroup(can_create_in_root),
 
                              v.OneOf(available_groups, hideList=False,
 
                                      testValueList=True,
 
                                      if_missing=None, not_empty=True))
 
        enable_locking = v.StringBoolean(if_missing=False)
 
        chained_validators = [v.ValidRepoGroup(edit, old_data)]
 

	
 
    return _RepoGroupForm
 

	
 

	
 
def RegisterForm(edit=False, old_data={}):
 
    class _RegisterForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = True
 
        username = All(
 
            v.ValidUsername(edit, old_data),
 
            v.UnicodeString(strip=True, min=1, not_empty=True)
 
        )
 
        password = All(
 
            v.ValidPassword(),
 
            v.UnicodeString(strip=False, min=6, not_empty=True)
 
        )
 
        password_confirmation = All(
 
            v.ValidPassword(),
 
            v.UnicodeString(strip=False, min=6, not_empty=True)
 
        )
 
        active = v.StringBoolean(if_missing=False)
 
        firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
 
        lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
 
        email = All(v.Email(not_empty=True), v.UniqSystemEmail(old_data))
 

	
 
        chained_validators = [v.ValidPasswordsMatch()]
 

	
 
    return _RegisterForm
 

	
 

	
 
def PasswordResetForm():
 
    class _PasswordResetForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = True
 
        email = All(v.ValidSystemEmail(), v.Email(not_empty=True))
 
    return _PasswordResetForm
 

	
 

	
 
def RepoForm(edit=False, old_data={}, supported_backends=BACKENDS.keys(),
 
             repo_groups=[], landing_revs=[]):
 
    class _RepoForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = False
 
        repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
 
                        v.SlugifyName())
 
        repo_group = All(v.CanWriteGroup(old_data),
 
                         v.OneOf(repo_groups, hideList=True))
 
        repo_type = v.OneOf(supported_backends, required=False,
 
                            if_missing=old_data.get('repo_type'))
 
        repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
 
        repo_private = v.StringBoolean(if_missing=False)
 
        repo_landing_rev = v.OneOf(landing_revs, hideList=True)
 
        repo_copy_permissions = v.StringBoolean(if_missing=False)
 
        clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
 

	
 
        repo_enable_statistics = v.StringBoolean(if_missing=False)
 
        repo_enable_downloads = v.StringBoolean(if_missing=False)
 
        repo_enable_locking = v.StringBoolean(if_missing=False)
 

	
 
        if edit:
 
            #this is repo owner
 
            user = All(v.UnicodeString(not_empty=True), v.ValidRepoUser())
 
            clone_uri_change = v.UnicodeString(not_empty=False, if_missing=v.Missing)
 

	
 
        chained_validators = [v.ValidCloneUri(),
 
                              v.ValidRepoName(edit, old_data)]
 
    return _RepoForm
 

	
 

	
 
def RepoPermsForm():
 
    class _RepoPermsForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = False
 
        chained_validators = [v.ValidPerms(type_='repo')]
 
    return _RepoPermsForm
 

	
 

	
 
def RepoGroupPermsForm(valid_recursive_choices):
 
    class _RepoGroupPermsForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = False
 
        recursive = v.OneOf(valid_recursive_choices)
 
        chained_validators = [v.ValidPerms(type_='repo_group')]
 
    return _RepoGroupPermsForm
 

	
 

	
 
def UserGroupPermsForm():
 
    class _UserPermsForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = False
 
        chained_validators = [v.ValidPerms(type_='user_group')]
 
    return _UserPermsForm
 

	
 

	
 
def RepoFieldForm():
 
    class _RepoFieldForm(formencode.Schema):
 
        filter_extra_fields = True
 
        allow_extra_fields = True
 

	
 
        new_field_key = All(v.FieldKey(),
 
                            v.UnicodeString(strip=True, min=3, not_empty=True))
 
        new_field_value = v.UnicodeString(not_empty=False, if_missing='')
 
        new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
 
                                 if_missing='str')
 
        new_field_label = v.UnicodeString(not_empty=False)
 
        new_field_desc = v.UnicodeString(not_empty=False)
 

	
 
    return _RepoFieldForm
 

	
 

	
 
def RepoForkForm(edit=False, old_data={}, supported_backends=BACKENDS.keys(),
 
                 repo_groups=[], landing_revs=[]):
 
    class _RepoForkForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = False
 
        repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
 
                        v.SlugifyName())
 
        repo_group = All(v.CanWriteGroup(),
 
                         v.OneOf(repo_groups, hideList=True))
 
        repo_type = All(v.ValidForkType(old_data), v.OneOf(supported_backends))
 
        description = v.UnicodeString(strip=True, min=1, not_empty=True)
 
        private = v.StringBoolean(if_missing=False)
 
        copy_permissions = v.StringBoolean(if_missing=False)
 
        update_after_clone = v.StringBoolean(if_missing=False)
 
        fork_parent_id = v.UnicodeString()
 
        chained_validators = [v.ValidForkName(edit, old_data)]
 
        landing_rev = v.OneOf(landing_revs, hideList=True)
 

	
 
    return _RepoForkForm
 

	
 

	
 
def ApplicationSettingsForm():
 
    class _ApplicationSettingsForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = False
 
        title = v.UnicodeString(strip=True, not_empty=False)
 
        realm = v.UnicodeString(strip=True, min=1, not_empty=True)
 
        ga_code = v.UnicodeString(strip=True, min=1, not_empty=False)
 
        captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
 
        captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
 

	
 
    return _ApplicationSettingsForm
 

	
 

	
 
def ApplicationVisualisationForm():
 
    class _ApplicationVisualisationForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = False
 
        show_public_icon = v.StringBoolean(if_missing=False)
 
        show_private_icon = v.StringBoolean(if_missing=False)
 
        stylify_metatags = v.StringBoolean(if_missing=False)
 

	
 
        repository_fields = v.StringBoolean(if_missing=False)
 
        lightweight_journal = v.StringBoolean(if_missing=False)
 
        dashboard_items = v.Int(min=5, not_empty=True)
 
        admin_grid_items = v.Int(min=5, not_empty=True)
 
        show_version = v.StringBoolean(if_missing=False)
 
        use_gravatar = v.StringBoolean(if_missing=False)
 
        gravatar_url = v.UnicodeString(min=3)
 
        clone_uri_tmpl = v.UnicodeString(min=3)
 

	
 
    return _ApplicationVisualisationForm
 

	
 

	
 
def ApplicationUiSettingsForm():
 
    class _ApplicationUiSettingsForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = False
 
        web_push_ssl = v.StringBoolean(if_missing=False)
 
        paths_root_path = All(
 
            v.ValidPath(),
 
            v.UnicodeString(strip=True, min=1, not_empty=True)
 
        )
 
        hooks_changegroup_update = v.StringBoolean(if_missing=False)
 
        hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
 
        hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
 
        hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
 

	
 
        extensions_largefiles = v.StringBoolean(if_missing=False)
 
        extensions_hgsubversion = v.StringBoolean(if_missing=False)
 
        extensions_hggit = v.StringBoolean(if_missing=False)
 

	
 
    return _ApplicationUiSettingsForm
 

	
 

	
 
def DefaultPermissionsForm(repo_perms_choices, group_perms_choices,
 
                           user_group_perms_choices, create_choices,
 
                           create_on_write_choices, repo_group_create_choices,
 
                           user_group_create_choices, fork_choices,
 
                           register_choices, extern_activate_choices):
 
    class _DefaultPermissionsForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = True
 
        overwrite_default_repo = v.StringBoolean(if_missing=False)
 
        overwrite_default_group = v.StringBoolean(if_missing=False)
 
        overwrite_default_user_group = v.StringBoolean(if_missing=False)
 
        anonymous = v.StringBoolean(if_missing=False)
 
        default_repo_perm = v.OneOf(repo_perms_choices)
 
        default_group_perm = v.OneOf(group_perms_choices)
 
        default_user_group_perm = v.OneOf(user_group_perms_choices)
 

	
 
        default_repo_create = v.OneOf(create_choices)
 
        create_on_write = v.OneOf(create_on_write_choices)
 
        default_user_group_create = v.OneOf(user_group_create_choices)
 
        #default_repo_group_create = v.OneOf(repo_group_create_choices) #not impl. yet
 
        default_fork = v.OneOf(fork_choices)
 

	
 
        default_register = v.OneOf(register_choices)
 
        default_extern_activate = v.OneOf(extern_activate_choices)
 
    return _DefaultPermissionsForm
 

	
 

	
 
def CustomDefaultPermissionsForm():
 
    class _CustomDefaultPermissionsForm(formencode.Schema):
 
        filter_extra_fields = True
 
        allow_extra_fields = True
 
        inherit_default_permissions = v.StringBoolean(if_missing=False)
 

	
 
        create_repo_perm = v.StringBoolean(if_missing=False)
 
        create_user_group_perm = v.StringBoolean(if_missing=False)
 
        #create_repo_group_perm Impl. later
 

	
 
        fork_repo_perm = v.StringBoolean(if_missing=False)
 

	
 
    return _CustomDefaultPermissionsForm
 

	
 

	
 
def DefaultsForm(edit=False, old_data={}, supported_backends=BACKENDS.keys()):
 
    class _DefaultsForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = True
 
        default_repo_type = v.OneOf(supported_backends)
 
        default_repo_private = v.StringBoolean(if_missing=False)
 
        default_repo_enable_statistics = v.StringBoolean(if_missing=False)
 
        default_repo_enable_downloads = v.StringBoolean(if_missing=False)
 
        default_repo_enable_locking = v.StringBoolean(if_missing=False)
 

	
 
    return _DefaultsForm
 

	
 

	
 
def AuthSettingsForm(current_active_modules):
 
    class _AuthSettingsForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = True
 
        auth_plugins = All(v.ValidAuthPlugins(),
 
                           v.UniqueListFromString()(not_empty=True))
 

	
 
        def __init__(self, *args, **kwargs):
 
            # The auth plugins tell us what form validators they use
 
            if current_active_modules:
 
                import kallithea.lib.auth_modules
 
                from kallithea.lib.auth_modules import LazyFormencode
 
                for module in current_active_modules:
 
                    plugin = kallithea.lib.auth_modules.loadplugin(module)
 
                    plugin_name = plugin.name
 
                    for sv in plugin.plugin_settings():
 
                        newk = "auth_%s_%s" % (plugin_name, sv["name"])
 
                        # can be a LazyFormencode object from plugin settings
 
                        validator = sv["validator"]
 
                        if isinstance(validator, LazyFormencode):
 
                            validator = validator()
 
                        #init all lazy validators from formencode.All
 
                        if isinstance(validator, All):
 
                            init_validators = []
 
                            for validator in validator.validators:
 
                                if isinstance(validator, LazyFormencode):
 
                                    validator = validator()
 
                                init_validators.append(validator)
 
                            validator.validators = init_validators
 

	
 
                        self.add_field(newk, validator)
 
            formencode.Schema.__init__(self, *args, **kwargs)
 

	
 
    return _AuthSettingsForm
 

	
 

	
 
def LdapSettingsForm(tls_reqcert_choices, search_scope_choices,
 
                     tls_kind_choices):
 
    class _LdapSettingsForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = True
 
        #pre_validators = [LdapLibValidator]
 
        ldap_active = v.StringBoolean(if_missing=False)
 
        ldap_host = v.UnicodeString(strip=True,)
 
        ldap_port = v.Number(strip=True,)
 
        ldap_tls_kind = v.OneOf(tls_kind_choices)
 
        ldap_tls_reqcert = v.OneOf(tls_reqcert_choices)
 
        ldap_dn_user = v.UnicodeString(strip=True,)
 
        ldap_dn_pass = v.UnicodeString(strip=True,)
 
        ldap_base_dn = v.UnicodeString(strip=True,)
 
        ldap_filter = v.UnicodeString(strip=True,)
 
        ldap_search_scope = v.OneOf(search_scope_choices)
 
        ldap_attr_login = v.AttrLoginValidator()(not_empty=True)
 
        ldap_attr_firstname = v.UnicodeString(strip=True,)
 
        ldap_attr_lastname = v.UnicodeString(strip=True,)
 
        ldap_attr_email = v.UnicodeString(strip=True,)
 

	
 
    return _LdapSettingsForm
 

	
 

	
 
def UserExtraEmailForm():
 
    class _UserExtraEmailForm(formencode.Schema):
 
        email = All(v.UniqSystemEmail(), v.Email(not_empty=True))
 
    return _UserExtraEmailForm
 

	
 

	
 
def UserExtraIpForm():
 
    class _UserExtraIpForm(formencode.Schema):
 
        ip = v.ValidIp()(not_empty=True)
 
    return _UserExtraIpForm
 

	
 

	
 
def PullRequestForm(repo_id):
 
    class _PullRequestForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = True
 

	
 
        user = v.UnicodeString(strip=True, required=True)
 
        org_repo = v.UnicodeString(strip=True, required=True)
 
        org_ref = v.UnicodeString(strip=True, required=True)
 
        other_repo = v.UnicodeString(strip=True, required=True)
 
        other_ref = v.UnicodeString(strip=True, required=True)
 
        review_members = v.Set()
 

	
 
        pullrequest_title = v.UnicodeString(strip=True, required=True)
 
        pullrequest_desc = v.UnicodeString(strip=True, required=False)
 

	
 
    return _PullRequestForm
 

	
 

	
 
def PullRequestPostForm():
 
    class _PullRequestPostForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = True
 

	
 
        pullrequest_title = v.UnicodeString(strip=True, required=True)
 
        pullrequest_desc = v.UnicodeString(strip=True, required=False)
 

	
 
    return _PullRequestPostForm
 

	
 

	
 
def GistForm(lifetime_options):
 
    class _GistForm(formencode.Schema):
 

	
 
        filename = All(v.BasePath()(),
 
                       v.UnicodeString(strip=True, required=False))
 
        description = v.UnicodeString(required=False, if_missing='')
 
        lifetime = v.OneOf(lifetime_options)
 
        mimetype = v.UnicodeString(required=False, if_missing=None)
 
        content = v.UnicodeString(required=True, not_empty=True)
 
        public = v.UnicodeString(required=False, if_missing='')
 
        private = v.UnicodeString(required=False, if_missing='')
 

	
 
    return _GistForm
kallithea/model/scm.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.scm
 
~~~~~~~~~~~~~~~~~~~
 

	
 
Scm model for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 9, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 
import os
 
import re
 
import time
 
import traceback
 
import logging
 
import cStringIO
 
import pkg_resources
 
from os.path import join as jn
 

	
 
from sqlalchemy import func
 
from pylons.i18n.translation import _
 

	
 
import kallithea
 
from kallithea.lib.vcs import get_backend
 
from kallithea.lib.vcs.exceptions import RepositoryError
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 

	
 
from kallithea import BACKENDS
 
from kallithea.lib import helpers as h
 
from kallithea.lib.utils2 import safe_str, safe_unicode, get_server_url,\
 
    _set_extras
 
from kallithea.lib.auth import HasRepoPermissionAny, HasRepoGroupPermissionAny,\
 
    HasUserGroupPermissionAny
 
from kallithea.lib.utils import get_filesystem_repos, make_ui, \
 
    action_logger
 
from kallithea.model import BaseModel
 
from kallithea.model.db import Repository, Ui, CacheInvalidation, \
 
    UserFollowing, UserLog, User, RepoGroup, PullRequest
 
from kallithea.lib.hooks import log_push_action
 
from kallithea.lib.exceptions import NonRelativePathError, IMCCommitError
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class UserTemp(object):
 
    def __init__(self, user_id):
 
        self.user_id = user_id
 

	
 
    def __repr__(self):
 
        return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
 

	
 

	
 
class RepoTemp(object):
 
    def __init__(self, repo_id):
 
        self.repo_id = repo_id
 

	
 
    def __repr__(self):
 
        return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
 

	
 

	
 
class CachedRepoList(object):
 
    """
 
    Cached repo list, uses in-memory cache after initialization, that is
 
    super fast
 
    """
 

	
 
    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
 
        self.db_repo_list = db_repo_list
 
        self.repos_path = repos_path
 
        self.order_by = order_by
 
        self.reversed = (order_by or '').startswith('-')
 
        if not perm_set:
 
            perm_set = ['repository.read', 'repository.write',
 
                        'repository.admin']
 
        self.perm_set = perm_set
 

	
 
    def __len__(self):
 
        return len(self.db_repo_list)
 

	
 
    def __repr__(self):
 
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
 

	
 
    def __iter__(self):
 
        # valid_cache_keys is pre-populated to avoid executing select statements
 
        # for each repo
 
        valid_cache_keys = CacheInvalidation.get_valid_cache_keys()
 

	
 
        for dbr in self.db_repo_list:
 
            scmr = dbr.scm_instance_cached(valid_cache_keys)
 
            # check permission at this level
 
            if not HasRepoPermissionAny(
 
                *self.perm_set)(dbr.repo_name, 'get repo check'):
 
                continue
 

	
 
            try:
 
                last_change = scmr.last_change
 
                tip = h.get_changeset_safe(scmr, 'tip')
 
            except Exception:
 
                log.error(
 
                    '%s this repository is present in database but it '
 
                    'cannot be created as an scm instance, org_exc:%s'
 
                    % (dbr.repo_name, traceback.format_exc())
 
                )
 
                continue
 

	
 
            tmp_d = {}
 
            tmp_d['name'] = dbr.repo_name
 
            tmp_d['name_sort'] = tmp_d['name'].lower()
 
            tmp_d['raw_name'] = tmp_d['name'].lower()
 
            tmp_d['description'] = dbr.description
 
            tmp_d['description_sort'] = tmp_d['description'].lower()
 
            tmp_d['last_change'] = last_change
 
            tmp_d['last_change_sort'] = time.mktime(last_change.timetuple())
 
            tmp_d['tip'] = tip.raw_id
 
            tmp_d['tip_sort'] = tip.revision
 
            tmp_d['rev'] = tip.revision
 
            tmp_d['contact'] = dbr.user.full_contact
 
            tmp_d['contact_sort'] = tmp_d['contact']
 
            tmp_d['owner_sort'] = tmp_d['contact']
 
            tmp_d['repo_archives'] = list(scmr._get_archives())
 
            tmp_d['last_msg'] = tip.message
 
            tmp_d['author'] = tip.author
 
            tmp_d['dbrepo'] = dbr.get_dict()
 
            tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
 
            yield tmp_d
 

	
 

	
 
class SimpleCachedRepoList(CachedRepoList):
 
    """
 
    Lighter version of CachedRepoList without the scm initialisation
 
    """
 

	
 
    def __iter__(self):
 
        for dbr in self.db_repo_list:
 
            # check permission at this level
 
            if not HasRepoPermissionAny(
 
                *self.perm_set)(dbr.repo_name, 'get repo check'):
 
                continue
 

	
 
            tmp_d = {
 
                'name': dbr.repo_name,
 
                'dbrepo': dbr.get_dict(),
 
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
 
            }
 
            yield tmp_d
 

	
 

	
 
class _PermCheckIterator(object):
 
    def __init__(self, obj_list, obj_attr, perm_set, perm_checker, extra_kwargs=None):
 
        """
 
        Creates an iterator over the given list of objects, additionally
 
        checking permissions on them against perm_set
 

	
 
        :param obj_list: list of db objects
 
        :param obj_attr: attribute of object to pass into perm_checker
 
        :param perm_set: list of permissions to check
 
        :param perm_checker: callable to check permissions against
 
        """
 
        self.obj_list = obj_list
 
        self.obj_attr = obj_attr
 
        self.perm_set = perm_set
 
        self.perm_checker = perm_checker
 
        self.extra_kwargs = extra_kwargs or {}
 

	
 
    def __len__(self):
 
        return len(self.obj_list)
 

	
 
    def __repr__(self):
 
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
 

	
 
    def __iter__(self):
 
        for db_obj in self.obj_list:
 
            # check permission at this level
 
            name = getattr(db_obj, self.obj_attr, None)
 
            if not self.perm_checker(*self.perm_set)(
 
                    name, self.__class__.__name__, **self.extra_kwargs):
 
                continue
 

	
 
            yield db_obj
 

	
 

	
 
class RepoList(_PermCheckIterator):
 

	
 
    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
 
        if not perm_set:
 
            perm_set = ['repository.read', 'repository.write', 'repository.admin']
 

	
 
        super(RepoList, self).__init__(obj_list=db_repo_list,
 
                    obj_attr='repo_name', perm_set=perm_set,
 
                    perm_checker=HasRepoPermissionAny,
 
                    extra_kwargs=extra_kwargs)
 

	
 

	
 
class RepoGroupList(_PermCheckIterator):
 

	
 
    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
 
        if not perm_set:
 
            perm_set = ['group.read', 'group.write', 'group.admin']
 

	
 
        super(RepoGroupList, self).__init__(obj_list=db_repo_group_list,
 
                    obj_attr='group_name', perm_set=perm_set,
 
                    perm_checker=HasRepoGroupPermissionAny,
 
                    extra_kwargs=extra_kwargs)
 

	
 

	
 
class UserGroupList(_PermCheckIterator):
 

	
 
    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
 
        if not perm_set:
 
            perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
 

	
 
        super(UserGroupList, self).__init__(obj_list=db_user_group_list,
 
                    obj_attr='users_group_name', perm_set=perm_set,
 
                    perm_checker=HasUserGroupPermissionAny,
 
                    extra_kwargs=extra_kwargs)
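
A minimal usage sketch of these permission-checked wrappers (illustrative only, assuming a configured Kallithea environment with an active database session and that this file is kallithea/model/scm.py; the permission check runs against the currently authenticated user):

from kallithea.model.meta import Session
from kallithea.model.db import Repository
from kallithea.model.scm import RepoList   # module path assumed

all_repos = Session().query(Repository).all()
# Iteration applies the permission filter; note that len() reports the size
# of the unfiltered input list, not the number of objects actually yielded.
readable = RepoList(all_repos, perm_set=['repository.read'])
visible_names = [r.repo_name for r in readable]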
 

	
 

	
 
class ScmModel(BaseModel):
 
    """
 
    Generic Scm Model
 
    """
 

	
 
    def __get_repo(self, instance):
 
        cls = Repository
 
        if isinstance(instance, cls):
 
            return instance
 
        elif isinstance(instance, int) or safe_str(instance).isdigit():
 
            return cls.get(instance)
 
        elif isinstance(instance, basestring):
 
            return cls.get_by_repo_name(instance)
 
        elif instance:
 
            raise Exception('given object must be int, basestr or Instance'
 
                            ' of %s got %s' % (type(cls), type(instance)))
 

	
 
    @LazyProperty
 
    def repos_path(self):
 
        """
 
        Gets the repositories root path from database
 
        """
 

	
 
        q = self.sa.query(Ui).filter(Ui.ui_key == '/').one()
 

	
 
        return q.ui_value
 

	
 
    def repo_scan(self, repos_path=None):
 
        """
 
        Listing of repositories in given path. This path should not be a
 
        repository itself. Return a dictionary of repository objects
 

	
 
        :param repos_path: path to directory containing repositories
 
        """
 

	
 
        if repos_path is None:
 
            repos_path = self.repos_path
 

	
 
        log.info('scanning for repositories in %s' % repos_path)
 

	
 
        baseui = make_ui('db')
 
        repos = {}
 

	
 
        for name, path in get_filesystem_repos(repos_path, recursive=True):
 
            # name needs to be decomposed and put back together using the /
 
            # since this is the internal storage separator for kallithea
 
            name = Repository.normalize_repo_name(name)
 

	
 
            try:
 
                if name in repos:
 
                    raise RepositoryError('Duplicate repository name %s '
 
                                          'found in %s' % (name, path))
 
                else:
 

	
 
                    klass = get_backend(path[0])
 

	
 
                    if path[0] == 'hg' and path[0] in BACKENDS.keys():
 
                        repos[name] = klass(safe_str(path[1]), baseui=baseui)
 

	
 
                    if path[0] == 'git' and path[0] in BACKENDS.keys():
 
                        repos[name] = klass(path[1])
 
            except OSError:
 
                continue
 
        log.debug('found %s paths with repositories' % (len(repos)))
 
        return repos
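
A sketch of calling repo_scan (illustrative only; it assumes a configured Kallithea instance so that ScmModel() can be built on the default session and make_ui('db') works). The returned dict maps repository names to vcs backend instances:

from kallithea.model.scm import ScmModel   # module path assumed

scm = ScmModel()
# With no argument the configured root (ScmModel.repos_path) is scanned.
found = scm.repo_scan()
for name, backend in sorted(found.items()):
    print name, backend.alias          # alias is 'hg' or 'git'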
 

	
 
    def get_repos(self, all_repos=None, sort_key=None, simple=False):
 
        """
 
        Get all repos from db and for each repo create it's
 
        Get all repos from db and for each repo create its
 
        backend instance and fill that backend with information from database
 

	
 
        :param all_repos: list of repository names as strings
 
            pass a specific list of repositories, useful for filtering
 

	
 
        :param sort_key: initial sorting of repos
 
        :param simple: use SimpleCachedRepoList - one without the SCM info
 
        """
 
        if all_repos is None:
 
            all_repos = self.sa.query(Repository)\
 
                        .filter(Repository.group_id == None)\
 
                        .order_by(func.lower(Repository.repo_name)).all()
 
        if simple:
 
            repo_iter = SimpleCachedRepoList(all_repos,
 
                                             repos_path=self.repos_path,
 
                                             order_by=sort_key)
 
        else:
 
            repo_iter = CachedRepoList(all_repos,
 
                                       repos_path=self.repos_path,
 
                                       order_by=sort_key)
 

	
 
        return repo_iter
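
A sketch of consuming get_repos (illustrative only, same assumptions as above). With simple=True the SimpleCachedRepoList entries shown earlier are yielded, i.e. only the 'name', 'dbrepo' and 'dbrepo_fork' keys, which avoids instantiating the scm backends:

from kallithea.model.scm import ScmModel   # module path assumed

for tmp_d in ScmModel().get_repos(simple=True):
    is_fork = bool(tmp_d['dbrepo_fork'])
    print tmp_d['name'], '(fork)' if is_fork else ''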
 

	
 
    def get_repo_groups(self, all_groups=None):
 
        if all_groups is None:
 
            all_groups = RepoGroup.query()\
 
                .filter(RepoGroup.group_parent_id == None).all()
 
        return [x for x in RepoGroupList(all_groups)]
 

	
 
    def mark_for_invalidation(self, repo_name, delete=False):
 
        """
 
        Mark caches of this repo invalid in the database.
 

	
 
        :param repo_name: the repo for which caches should be marked invalid
 
        """
 
        CacheInvalidation.set_invalidate(repo_name, delete=delete)
 
        repo = Repository.get_by_repo_name(repo_name)
 
        if repo:
 
            repo.update_changeset_cache()
 

	
 
    def toggle_following_repo(self, follow_repo_id, user_id):
 

	
 
        f = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        if f is not None:
 
            try:
 
                self.sa.delete(f)
 
                action_logger(UserTemp(user_id),
 
                              'stopped_following_repo',
 
                              RepoTemp(follow_repo_id))
 
                return
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                raise
 

	
 
        try:
 
            f = UserFollowing()
 
            f.user_id = user_id
 
            f.follows_repo_id = follow_repo_id
 
            self.sa.add(f)
 

	
 
            action_logger(UserTemp(user_id),
 
                          'started_following_repo',
 
                          RepoTemp(follow_repo_id))
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def toggle_following_user(self, follow_user_id, user_id):
 
        f = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.follows_user_id == follow_user_id)\
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        if f is not None:
 
            try:
 
                self.sa.delete(f)
 
                return
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                raise
 

	
 
        try:
 
            f = UserFollowing()
 
            f.user_id = user_id
 
            f.follows_user_id = follow_user_id
 
            self.sa.add(f)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def is_following_repo(self, repo_name, user_id, cache=False):
 
        r = self.sa.query(Repository)\
 
            .filter(Repository.repo_name == repo_name).scalar()
 

	
 
        f = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.follows_repository == r)\
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        return f is not None
 

	
 
    def is_following_user(self, username, user_id, cache=False):
 
        u = User.get_by_username(username)
 

	
 
        f = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.follows_user == u)\
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        return f is not None
 

	
 
    def get_followers(self, repo):
 
        repo = self._get_repo(repo)
 

	
 
        return self.sa.query(UserFollowing)\
 
                .filter(UserFollowing.follows_repository == repo).count()
 

	
 
    def get_forks(self, repo):
 
        repo = self._get_repo(repo)
 
        return self.sa.query(Repository)\
 
                .filter(Repository.fork == repo).count()
 

	
 
    def get_pull_requests(self, repo):
 
        repo = self._get_repo(repo)
 
        return self.sa.query(PullRequest)\
 
                .filter(PullRequest.other_repo == repo)\
 
                .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
 

	
 
    def mark_as_fork(self, repo, fork, user):
 
        repo = self.__get_repo(repo)
 
        fork = self.__get_repo(fork)
 
        if fork and repo.repo_id == fork.repo_id:
 
            raise Exception("Cannot set repository as fork of itself")
 

	
 
        if fork and repo.repo_type != fork.repo_type:
 
            raise RepositoryError("Cannot set repository as fork of repository with other type")
 

	
 
        repo.fork = fork
 
        self.sa.add(repo)
 
        return repo
 

	
 
    def _handle_rc_scm_extras(self, username, repo_name, repo_alias,
 
                              action=None):
 
        from kallithea import CONFIG
 
        from kallithea.lib.base import _get_ip_addr
 
        try:
 
            from pylons import request
 
            environ = request.environ
 
        except TypeError:
 
            # we might use this outside of request context, let's fake the
 
            # environ data
 
            from webob import Request
 
            environ = Request.blank('').environ
 
        extras = {
 
            'ip': _get_ip_addr(environ),
 
            'username': username,
 
            'action': action or 'push_local',
 
            'repository': repo_name,
 
            'scm': repo_alias,
 
            'config': CONFIG['__file__'],
 
            'server_url': get_server_url(environ),
 
            'make_lock': None,
 
            'locked_by': [None, None]
 
        }
 
        _set_extras(extras)
 

	
 
    def _handle_push(self, repo, username, action, repo_name, revisions):
 
        """
 
        Triggers push action hooks
 

	
 
        :param repo: SCM repo
 
        :param username: username who pushes
 
        :param action: push/push_local/push_remote
 
        :param repo_name: name of repo
 
        :param revisions: list of revisions that we pushed
 
        """
 
        self._handle_rc_scm_extras(username, repo_name, repo_alias=repo.alias)
 
        _scm_repo = repo._repo
 
        # trigger push hook
 
        if repo.alias == 'hg':
 
            log_push_action(_scm_repo.ui, _scm_repo, node=revisions[0])
 
        elif repo.alias == 'git':
 
            log_push_action(None, _scm_repo, _git_revs=revisions)
 

	
 
    def _get_IMC_module(self, scm_type):
 
        """
 
        Returns InMemoryCommit class based on scm_type
 

	
 
        :param scm_type:
 
        """
 
        if scm_type == 'hg':
 
            from kallithea.lib.vcs.backends.hg import MercurialInMemoryChangeset
 
            return MercurialInMemoryChangeset
 

	
 
        if scm_type == 'git':
 
            from kallithea.lib.vcs.backends.git import GitInMemoryChangeset
 
            return GitInMemoryChangeset
 

	
 
        raise Exception('Invalid scm_type, must be one of hg,git got %s'
 
                        % (scm_type,))
 

	
 
    def pull_changes(self, repo, username):
 
        dbrepo = self.__get_repo(repo)
 
        clone_uri = dbrepo.clone_uri
 
        if not clone_uri:
 
            raise Exception("This repository doesn't have a clone uri")
 

	
 
        repo = dbrepo.scm_instance
 
        repo_name = dbrepo.repo_name
 
        try:
 
            if repo.alias == 'git':
 
                repo.fetch(clone_uri)
 
                # git doesn't really have something like post-fetch action
 
                # we fake that now. #TODO: extract fetched revisions somehow
 
                # here
 
                self._handle_push(repo,
 
                                  username=username,
 
                                  action='push_remote',
 
                                  repo_name=repo_name,
 
                                  revisions=[])
 
            else:
 
                self._handle_rc_scm_extras(username, dbrepo.repo_name,
 
                                           repo.alias, action='push_remote')
 
                repo.pull(clone_uri)
 

	
 
            self.mark_for_invalidation(repo_name)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def commit_change(self, repo, repo_name, cs, user, author, message,
 
                      content, f_path):
 
        """
 
        Commits changes
 

	
 
        :param repo: SCM instance
 

	
 
        """
 
        user = self._get_user(user)
 
        IMC = self._get_IMC_module(repo.alias)
 

	
 
        # decoding here ensures that we have properly encoded values
 
        # in any other case this will throw exceptions and deny commit
 
        content = safe_str(content)
 
        path = safe_str(f_path)
 
        # message and author need to be unicode
 
        # proper backend should then translate that into required type
 
        message = safe_unicode(message)
 
        author = safe_unicode(author)
 
        imc = IMC(repo)
 
        imc.change(FileNode(path, content, mode=cs.get_file_mode(f_path)))
 
        try:
 
            tip = imc.commit(message=message, author=author,
 
                             parents=[cs], branch=cs.branch)
 
        except Exception, e:
 
            log.error(traceback.format_exc())
 
            raise IMCCommitError(str(e))
 
        finally:
 
            # always clear caches; even if the commit fails we want a fresh object
 
            self.mark_for_invalidation(repo_name)
 
        self._handle_push(repo,
 
                          username=user.username,
 
                          action='push_local',
 
                          repo_name=repo_name,
 
                          revisions=[tip.raw_id])
 
        return tip
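
A hedged sketch of committing a single-file change through commit_change (illustrative only; repository name, author, message and path are placeholders, and f_path must already exist in the parent changeset because its mode is read via cs.get_file_mode()):

from kallithea.model.db import Repository
from kallithea.model.scm import ScmModel   # module path assumed

db_repo = Repository.get_by_repo_name('some/repo')      # hypothetical name
scm_repo = db_repo.scm_instance
parent = scm_repo.get_changeset()                        # assumed to default to tip
new_tip = ScmModel().commit_change(
    repo=scm_repo, repo_name=db_repo.repo_name, cs=parent,
    user=1,                                              # user id or User object
    author=u'Jane Doe <jane@example.com>',               # placeholder
    message=u'Fix typo in README',
    content='corrected content\n',
    f_path='README.rst')                                 # placeholder path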
 

	
 
    def _sanitize_path(self, f_path):
 
        if f_path.startswith('/') or f_path.startswith('.') or '../' in f_path:
 
            raise NonRelativePathError('%s is not a relative path' % f_path)
 
        if f_path:
 
            f_path = os.path.normpath(f_path)
 
        return f_path
 

	
 
    def get_nodes(self, repo_name, revision, root_path='/', flat=True):
 
        """
 
        recursive walk in root dir and return a set of all paths in that dir
 
        based on repository walk function
 

	
 
        :param repo_name: name of repository
 
        :param revision: revision for which to list nodes
 
        :param root_path: root path to list
 
        :param flat: return as a list, if False returns a dict with decription
 
        :param flat: return as a list, if False returns a dict with description
 

	
 
        """
 
        _files = list()
 
        _dirs = list()
 
        try:
 
            _repo = self.__get_repo(repo_name)
 
            changeset = _repo.scm_instance.get_changeset(revision)
 
            root_path = root_path.lstrip('/')
 
            for topnode, dirs, files in changeset.walk(root_path):
 
                for f in files:
 
                    _files.append(f.path if flat else {"name": f.path,
 
                                                       "type": "file"})
 
                for d in dirs:
 
                    _dirs.append(d.path if flat else {"name": d.path,
 
                                                      "type": "dir"})
 
        except RepositoryError:
 
            log.debug(traceback.format_exc())
 
            raise
 

	
 
        return _dirs, _files
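
A short sketch of get_nodes (illustrative only; the repository name is a placeholder and the revision should be any identifier the backend's get_changeset() accepts):

from kallithea.model.scm import ScmModel   # module path assumed

dirs, files = ScmModel().get_nodes('some/repo',          # hypothetical name
                                   revision='tip',       # hg-style; use a sha for git
                                   root_path='/', flat=True)
# flat=True gives plain path strings; flat=False gives {'name': ..., 'type': ...} dicts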
 

	
 
    def create_nodes(self, user, repo, message, nodes, parent_cs=None,
 
                     author=None, trigger_push_hook=True):
 
        """
 
        Commits given multiple nodes into repo
 

	
 
        :param user: Kallithea User object or user_id, the committer
 
        :param repo: Kallithea Repository object
 
        :param message: commit message
 
        :param nodes: mapping {filename:{'content':content},...}
 
        :param parent_cs: parent changeset; can be empty, then it's the initial commit
 
        :param author: author of the commit; can be different than the committer (only for git)
 
        :param trigger_push_hook: trigger push hooks
 

	
 
        :returns: new committed changeset
 
        """
 

	
 
        user = self._get_user(user)
 
        scm_instance = repo.scm_instance_no_cache()
 

	
 
        processed_nodes = []
 
        for f_path in nodes:
 
            f_path = self._sanitize_path(f_path)
 
            content = nodes[f_path]['content']
 
            f_path = safe_str(f_path)
 
            # decoding here ensures that we have properly encoded values
 
            # in any other case this will throw exceptions and deny commit
 
            if isinstance(content, (basestring,)):
 
                content = safe_str(content)
 
            elif isinstance(content, (file, cStringIO.OutputType,)):
 
                content = content.read()
 
            else:
 
                raise Exception('Content is of unrecognized type %s' % (
 
                    type(content)
 
                ))
 
            processed_nodes.append((f_path, content))
 

	
 
        message = safe_unicode(message)
 
        commiter = user.full_contact
 
        author = safe_unicode(author) if author else commiter
 

	
 
        IMC = self._get_IMC_module(scm_instance.alias)
 
        imc = IMC(scm_instance)
 

	
 
        if not parent_cs:
 
            parent_cs = EmptyChangeset(alias=scm_instance.alias)
 

	
 
        if isinstance(parent_cs, EmptyChangeset):
 
            # EmptyChangeset means we're editing an empty repository
 
            parents = None
 
        else:
 
            parents = [parent_cs]
 
        # add multiple nodes
 
        for path, content in processed_nodes:
 
            imc.add(FileNode(path, content=content))
 

	
 
        tip = imc.commit(message=message,
 
                         author=author,
 
                         parents=parents,
 
                         branch=parent_cs.branch)
 

	
 
        self.mark_for_invalidation(repo.repo_name)
 
        if trigger_push_hook:
 
            self._handle_push(scm_instance,
 
                              username=user.username,
 
                              action='push_local',
 
                              repo_name=repo.repo_name,
 
                              revisions=[tip.raw_id])
 
        return tip
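
A sketch of creating files with create_nodes (illustrative only). The nodes mapping follows the {filename: {'content': ...}} structure documented above; names, user and message are placeholders, and the parent changeset is passed explicitly because None is reserved for empty repositories:

from kallithea.model.db import Repository, User
from kallithea.model.scm import ScmModel   # module path assumed

db_repo = Repository.get_by_repo_name('some/repo')           # hypothetical
tip = ScmModel().create_nodes(
    user=User.get_by_username('admin'),                      # hypothetical user
    repo=db_repo,
    message=u'Add initial docs',
    nodes={
        'docs/index.rst': {'content': 'Docs\n====\n'},       # str or file-like accepted
    },
    parent_cs=db_repo.scm_instance.get_changeset(),          # current tip
    trigger_push_hook=True)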
 

	
 
    def update_nodes(self, user, repo, message, nodes, parent_cs=None,
 
                     author=None, trigger_push_hook=True):
 
        user = self._get_user(user)
 
        scm_instance = repo.scm_instance_no_cache()
 

	
 
        message = safe_unicode(message)
 
        commiter = user.full_contact
 
        author = safe_unicode(author) if author else commiter
 

	
 
        imc_class = self._get_IMC_module(scm_instance.alias)
 
        imc = imc_class(scm_instance)
 

	
 
        if not parent_cs:
 
            parent_cs = EmptyChangeset(alias=scm_instance.alias)
 

	
 
        if isinstance(parent_cs, EmptyChangeset):
 
            # EmptyChangeset means we're editing an empty repository
 
            parents = None
 
        else:
 
            parents = [parent_cs]
 

	
 
        # add multiple nodes
 
        for _filename, data in nodes.items():
 
            # new filename, can be renamed from the old one
 
            filename = self._sanitize_path(data['filename'])
 
            old_filename = self._sanitize_path(_filename)
 
            content = data['content']
 

	
 
            filenode = FileNode(old_filename, content=content)
 
            op = data['op']
 
            if op == 'add':
 
                imc.add(filenode)
 
            elif op == 'del':
 
                imc.remove(filenode)
 
            elif op == 'mod':
 
                if filename != old_filename:
 
                    #TODO: handle renames, needs vcs lib changes
 
                    imc.remove(filenode)
 
                    imc.add(FileNode(filename, content=content))
 
                else:
 
                    imc.change(filenode)
 

	
 
        # commit changes
 
        tip = imc.commit(message=message,
 
                         author=author,
 
                         parents=parents,
 
                         branch=parent_cs.branch)
 

	
 
        self.mark_for_invalidation(repo.repo_name)
 
        if trigger_push_hook:
 
            self._handle_push(scm_instance,
 
                              username=user.username,
 
                              action='push_local',
 
                              repo_name=repo.repo_name,
 
                              revisions=[tip.raw_id])
 

	
 
    def delete_nodes(self, user, repo, message, nodes, parent_cs=None,
 
                     author=None, trigger_push_hook=True):
 
        """
 
        Deletes given multiple nodes from repo
 

	
 
        :param user: Kallithea User object or user_id, the committer
 
        :param repo: Kallithea Repository object
 
        :param message: commit message
 
        :param nodes: mapping {filename:{'content':content},...}
 
        :param parent_cs: parent changeset; can be empty, then it's the initial commit
 
        :param author: author of the commit; can be different than the committer (only for git)
 
        :param trigger_push_hook: trigger push hooks
 

	
 
        :returns: new committed changeset after deletion
 
        """
 

	
 
        user = self._get_user(user)
 
        scm_instance = repo.scm_instance_no_cache()
 

	
 
        processed_nodes = []
 
        for f_path in nodes:
 
            f_path = self._sanitize_path(f_path)
 
            # content can be empty but for compatabilty it allows same dicts
 
            # content can be empty but for compatibility it allows same dicts
 
            # structure as add_nodes
 
            content = nodes[f_path].get('content')
 
            processed_nodes.append((f_path, content))
 

	
 
        message = safe_unicode(message)
 
        commiter = user.full_contact
 
        author = safe_unicode(author) if author else commiter
 

	
 
        IMC = self._get_IMC_module(scm_instance.alias)
 
        imc = IMC(scm_instance)
 

	
 
        if not parent_cs:
 
            parent_cs = EmptyChangeset(alias=scm_instance.alias)
 

	
 
        if isinstance(parent_cs, EmptyChangeset):
 
            # EmptyChangeset means we're editing an empty repository
 
            parents = None
 
        else:
 
            parents = [parent_cs]
 
        # add multiple nodes
 
        for path, content in processed_nodes:
 
            imc.remove(FileNode(path, content=content))
 

	
 
        tip = imc.commit(message=message,
 
                         author=author,
 
                         parents=parents,
 
                         branch=parent_cs.branch)
 

	
 
        self.mark_for_invalidation(repo.repo_name)
 
        if trigger_push_hook:
 
            self._handle_push(scm_instance,
 
                              username=user.username,
 
                              action='push_local',
 
                              repo_name=repo.repo_name,
 
                              revisions=[tip.raw_id])
 
        return tip
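
And the matching removal, sketched under the same assumptions; the content values may be left empty since only the paths matter here:

from kallithea.model.db import Repository
from kallithea.model.scm import ScmModel   # module path assumed

db_repo = Repository.get_by_repo_name('some/repo')           # hypothetical
ScmModel().delete_nodes(
    user=1, repo=db_repo,
    message=u'Remove obsolete file',
    nodes={'docs/obsolete.rst': {}},                          # placeholder path
    parent_cs=db_repo.scm_instance.get_changeset())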
 

	
 
    def get_unread_journal(self):
 
        return self.sa.query(UserLog).count()
 

	
 
    def get_repo_landing_revs(self, repo=None):
 
        """
 
        Generates select options with tags, branches and bookmarks (bookmarks for hg only)
 
        grouped by type
 

	
 
        :param repo:
 
        """
 

	
 
        hist_l = []
 
        choices = []
 
        repo = self.__get_repo(repo)
 
        hist_l.append(['rev:tip', _('latest tip')])
 
        choices.append('rev:tip')
 
        if not repo:
 
            return choices, hist_l
 

	
 
        repo = repo.scm_instance
 

	
 
        branches_group = ([(u'branch:%s' % k, k) for k, v in
 
                           repo.branches.iteritems()], _("Branches"))
 
        hist_l.append(branches_group)
 
        choices.extend([x[0] for x in branches_group[0]])
 

	
 
        if repo.alias == 'hg':
 
            bookmarks_group = ([(u'book:%s' % k, k) for k, v in
 
                                repo.bookmarks.iteritems()], _("Bookmarks"))
 
            hist_l.append(bookmarks_group)
 
            choices.extend([x[0] for x in bookmarks_group[0]])
 

	
 
        tags_group = ([(u'tag:%s' % k, k) for k, v in
 
                       repo.tags.iteritems()], _("Tags"))
 
        hist_l.append(tags_group)
 
        choices.extend([x[0] for x in tags_group[0]])
 

	
 
        return choices, hist_l
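
A sketch of the returned data (illustrative only; the repository name is a placeholder):

from kallithea.model.scm import ScmModel   # module path assumed

choices, hist_l = ScmModel().get_repo_landing_revs('some/repo')  # hypothetical
# choices is flat, e.g. ['rev:tip', 'branch:default', 'tag:v1.0', ...];
# hist_l pairs the same values with labels, grouped for a select widget.
assert choices[0] == 'rev:tip'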
 

	
 
    def install_git_hook(self, repo, force_create=False):
 
        """
 
        Creates a kallithea hook inside a git repository
 

	
 
        :param repo: Instance of VCS repo
 
        :param force_create: Create even if same name hook exists
 
        """
 

	
 
        loc = jn(repo.path, 'hooks')
 
        if not repo.bare:
 
            loc = jn(repo.path, '.git', 'hooks')
 
        if not os.path.isdir(loc):
 
            os.makedirs(loc)
 

	
 
        tmpl_post = pkg_resources.resource_string(
 
            'kallithea', jn('config', 'post_receive_tmpl.py')
 
        )
 
        tmpl_pre = pkg_resources.resource_string(
 
            'kallithea', jn('config', 'pre_receive_tmpl.py')
 
        )
 

	
 
        for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
 
            _hook_file = jn(loc, '%s-receive' % h_type)
 
            has_hook = False
 
            log.debug('Installing git hook in repo %s' % repo)
 
            if os.path.exists(_hook_file):
 
                # let's take a look at this hook, maybe it's kallithea ?
 
                log.debug('hook exists, checking if it is from kallithea')
 
                with open(_hook_file, 'rb') as f:
 
                    data = f.read()
 
                    matches = re.compile(r'(?:%s)\s*=\s*(.*)'
 
                                         % 'KALLITHEA_HOOK_VER').search(data)
 
                    if matches:
 
                        try:
 
                            ver = matches.groups()[0]
 
                            log.debug('got %s it is kallithea' % (ver))
 
                            has_hook = True
 
                        except Exception:
 
                            log.error(traceback.format_exc())
 
            else:
 
                # there is no hook in this dir, so we want to create one
 
                has_hook = True
 

	
 
            if has_hook or force_create:
 
                log.debug('writing %s hook file !' % (h_type,))
 
                try:
 
                    with open(_hook_file, 'wb') as f:
 
                        tmpl = tmpl.replace('_TMPL_', kallithea.__version__)
 
                        f.write(tmpl)
 
                    os.chmod(_hook_file, 0755)
 
                except IOError, e:
 
                    log.error('error writing %s: %s' % (_hook_file, e))
 
            else:
 
                log.debug('skipping writing hook file')
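
A sketch of (re)installing the hooks for one git repository (illustrative only; the repository name is a placeholder):

from kallithea.model.db import Repository
from kallithea.model.scm import ScmModel   # module path assumed

db_repo = Repository.get_by_repo_name('mirrors/example-git')   # hypothetical
if db_repo.repo_type == 'git':
    # force_create=True rewrites pre-receive/post-receive even when a hook
    # carrying the KALLITHEA_HOOK_VER marker is already present.
    ScmModel().install_git_hook(db_repo.scm_instance, force_create=True)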
kallithea/model/user.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.user
 
~~~~~~~~~~~~~~~~~~~~
 

	
 
users model for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 9, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 

	
 
import logging
 
import traceback
 
from pylons.i18n.translation import _
 

	
 
from sqlalchemy.exc import DatabaseError
 

	
 
from kallithea import EXTERN_TYPE_INTERNAL
 
from kallithea.lib.utils2 import safe_unicode, generate_api_key, get_current_authuser
 
from kallithea.lib.caching_query import FromCache
 
from kallithea.model import BaseModel
 
from kallithea.model.db import User, UserToPerm, Notification, \
 
    UserEmailMap, UserIpMap
 
from kallithea.lib.exceptions import DefaultUserException, \
 
    UserOwnsReposException
 
from kallithea.model.meta import Session
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class UserModel(BaseModel):
 
    cls = User
 

	
 
    def get(self, user_id, cache=False):
 
        user = self.sa.query(User)
 
        if cache:
 
            user = user.options(FromCache("sql_cache_short",
 
                                          "get_user_%s" % user_id))
 
        return user.get(user_id)
 

	
 
    def get_user(self, user):
 
        return self._get_user(user)
 

	
 
    def get_by_username(self, username, cache=False, case_insensitive=False):
 

	
 
        if case_insensitive:
 
            user = self.sa.query(User).filter(User.username.ilike(username))
 
        else:
 
            user = self.sa.query(User)\
 
                .filter(User.username == username)
 
        if cache:
 
            user = user.options(FromCache("sql_cache_short",
 
                                          "get_user_%s" % username))
 
        return user.scalar()
 

	
 
    def get_by_email(self, email, cache=False, case_insensitive=False):
 
        return User.get_by_email(email, case_insensitive, cache)
 

	
 
    def get_by_api_key(self, api_key, cache=False):
 
        return User.get_by_api_key(api_key, cache)
 

	
 
    def create(self, form_data, cur_user=None):
 
        if not cur_user:
 
            cur_user = getattr(get_current_authuser(), 'username', None)
 

	
 
        from kallithea.lib.hooks import log_create_user, check_allowed_create_user
 
        _fd = form_data
 
        user_data = {
 
            'username': _fd['username'], 'password': _fd['password'],
 
            'email': _fd['email'], 'firstname': _fd['firstname'], 'lastname': _fd['lastname'],
 
            'active': _fd['active'], 'admin': False
 
        }
 
        # raises UserCreationError if it's not allowed
 
        check_allowed_create_user(user_data, cur_user)
 
        from kallithea.lib.auth import get_crypt_password
 
        try:
 
            new_user = User()
 
            for k, v in form_data.items():
 
                if k == 'password':
 
                    v = get_crypt_password(v)
 
                if k == 'firstname':
 
                    k = 'name'
 
                setattr(new_user, k, v)
 

	
 
            new_user.api_key = generate_api_key(form_data['username'])
 
            self.sa.add(new_user)
 

	
 
            log_create_user(new_user.get_dict(), cur_user)
 
            return new_user
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def create_or_update(self, username, password, email, firstname='',
 
                         lastname='', active=True, admin=False,
 
                         extern_type=None, extern_name=None, cur_user=None):
 
        """
 
        Creates a new instance if not found, or updates current one
 

	
 
        :param username:
 
        :param password:
 
        :param email:
 
        :param active:
 
        :param firstname:
 
        :param lastname:
 
        :param active:
 
        :param admin:
 
        :param extern_name:
 
        :param extern_type:
 
        :param cur_user:
 
        """
 
        if not cur_user:
 
            cur_user = getattr(get_current_authuser(), 'username', None)
 

	
 
        from kallithea.lib.auth import get_crypt_password, check_password
 
        from kallithea.lib.hooks import log_create_user, check_allowed_create_user
 
        user_data = {
 
            'username': username, 'password': password,
 
            'email': email, 'firstname': firstname, 'lastname': lastname,
 
            'active': active, 'admin': admin
 
        }
 
        # raises UserCreationError if it's not allowed
 
        check_allowed_create_user(user_data, cur_user)
 

	
 
        log.debug('Checking for %s account in Kallithea database' % username)
 
        user = User.get_by_username(username, case_insensitive=True)
 
        if user is None:
 
            log.debug('creating new user %s' % username)
 
            new_user = User()
 
            edit = False
 
        else:
 
            log.debug('updating user %s' % username)
 
            new_user = user
 
            edit = True
 

	
 
        try:
 
            new_user.username = username
 
            new_user.admin = admin
 
            new_user.email = email
 
            new_user.active = active
 
            new_user.extern_name = safe_unicode(extern_name) if extern_name else None
 
            new_user.extern_type = safe_unicode(extern_type) if extern_type else None
 
            new_user.name = firstname
 
            new_user.lastname = lastname
 

	
 
            if not edit:
 
                new_user.api_key = generate_api_key(username)
 

	
 
            # set password only if creating a user or the password has changed
 
            password_change = new_user.password and not check_password(password,
 
                                                            new_user.password)
 
            if not edit or password_change:
 
                reason = 'new password' if edit else 'new user'
 
                log.debug('Updating password reason=>%s' % (reason,))
 
                new_user.password = get_crypt_password(password) if password else None
 

	
 
            self.sa.add(new_user)
 

	
 
            if not edit:
 
                log_create_user(new_user.get_dict(), cur_user)
 
            return new_user
 
        except (DatabaseError,):
 
            log.error(traceback.format_exc())
 
            raise
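
A sketch of create_or_update (illustrative only), e.g. as used when syncing accounts from an external authentication plugin; all account values and the 'ldap' extern markers are placeholders, and the session commit is left to the caller:

from kallithea.model.meta import Session
from kallithea.model.user import UserModel

user = UserModel().create_or_update(
    username='jdoe', password='s3cret',        # password is stored hashed
    email='jdoe@example.com',
    firstname='John', lastname='Doe',
    extern_type='ldap', extern_name='jdoe')    # placeholder extern values
Session().commit()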
 

	
 
    def create_registration(self, form_data):
 
        from kallithea.model.notification import NotificationModel
 
        import kallithea.lib.helpers as h
 

	
 
        try:
 
            form_data['admin'] = False
 
            form_data['extern_name'] = EXTERN_TYPE_INTERNAL
 
            form_data['extern_type'] = EXTERN_TYPE_INTERNAL
 
            new_user = self.create(form_data)
 

	
 
            self.sa.add(new_user)
 
            self.sa.flush()
 

	
 
            # notification to admins
 
            subject = _('New user registration')
 
            body = ('New user registration\n'
 
                    '---------------------\n'
 
                    '- Username: %s\n'
 
                    '- Full Name: %s\n'
 
                    '- Email: %s\n')
 
            body = body % (new_user.username, new_user.full_name, new_user.email)
 
            edit_url = h.canonical_url('edit_user', id=new_user.user_id)
 
            email_kwargs = {'registered_user_url': edit_url, 'new_username': new_user.username}
 
            NotificationModel().create(created_by=new_user, subject=subject,
 
                                       body=body, recipients=None,
 
                                       type_=Notification.TYPE_REGISTRATION,
 
                                       email_kwargs=email_kwargs)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def update(self, user_id, form_data, skip_attrs=[]):
 
        from kallithea.lib.auth import get_crypt_password
 
        try:
 
            user = self.get(user_id, cache=False)
 
            if user.username == User.DEFAULT_USER:
 
                raise DefaultUserException(
 
                                _("You can't Edit this user since it's "
 
                                  "crucial for entire application"))
 

	
 
            for k, v in form_data.items():
 
                if k in skip_attrs:
 
                    continue
 
                if k == 'new_password' and v:
 
                    user.password = get_crypt_password(v)
 
                else:
 
                    # old legacy thing: ORM models store firstname as name,
 
                    # needs a proper refactor to username
 
                    if k == 'firstname':
 
                        k = 'name'
 
                    setattr(user, k, v)
 
            self.sa.add(user)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def update_user(self, user, **kwargs):
 
        from kallithea.lib.auth import get_crypt_password
 
        try:
 
            user = self._get_user(user)
 
            if user.username == User.DEFAULT_USER:
 
                raise DefaultUserException(
 
                    _("You can't Edit this user since it's"
 
                      " crucial for entire application")
 
                )
 

	
 
            for k, v in kwargs.items():
 
                if k == 'password' and v:
 
                    v = get_crypt_password(v)
 

	
 
                setattr(user, k, v)
 
            self.sa.add(user)
 
            return user
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def delete(self, user, cur_user=None):
 
        if not cur_user:
 
            cur_user = getattr(get_current_authuser(), 'username', None)
 
        user = self._get_user(user)
 

	
 
        try:
 
            if user.username == User.DEFAULT_USER:
 
                raise DefaultUserException(
 
                    _(u"You can't remove this user since it's"
 
                      " crucial for entire application")
 
                )
 
            if user.repositories:
 
                repos = [x.repo_name for x in user.repositories]
 
                raise UserOwnsReposException(
 
                    _(u'user "%s" still owns %s repositories and cannot be '
 
                      'removed. Switch owners or remove those repositories. %s')
 
                    % (user.username, len(repos), ', '.join(repos))
 
                )
 
            self.sa.delete(user)
 

	
 
            from kallithea.lib.hooks import log_delete_user
 
            log_delete_user(user.get_dict(), cur_user)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def reset_password_link(self, data):
 
        from kallithea.lib.celerylib import tasks, run_task
 
        from kallithea.model.notification import EmailNotificationModel
 
        import kallithea.lib.helpers as h
 

	
 
        user_email = data['email']
 
        try:
 
            user = User.get_by_email(user_email)
 
            if user:
 
                log.debug('password reset user found %s' % user)
 
                link = h.canonical_url('reset_password_confirmation', key=user.api_key)
 
                reg_type = EmailNotificationModel.TYPE_PASSWORD_RESET
 
                body = EmailNotificationModel().get_email_tmpl(reg_type,
 
                                                               user=user.short_contact,
 
                                                               reset_url=link)
 
                log.debug('sending email')
 
                run_task(tasks.send_email, [user_email],
 
                         _("Password reset link"), body, body)
 
                log.info('send new password mail to %s' % user_email)
 
            else:
 
                log.debug("password reset email %s not found" % user_email)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            return False
 

	
 
        return True
 

	
 
    def reset_password(self, data):
 
        from kallithea.lib.celerylib import tasks, run_task
 
        from kallithea.lib import auth
 
        user_email = data['email']
 
        pre_db = True
 
        try:
 
            user = User.get_by_email(user_email)
 
            new_passwd = auth.PasswordGenerator().gen_password(8,
 
                            auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
 
            if user:
 
                user.password = auth.get_crypt_password(new_passwd)
 
                Session().add(user)
 
                Session().commit()
 
                log.info('change password for %s' % user_email)
 
            if new_passwd is None:
 
                raise Exception('unable to generate new password')
 

	
 
            pre_db = False
 
            run_task(tasks.send_email, [user_email],
 
                     _('Your new password'),
 
                     _('Your new Kallithea password:%s') % (new_passwd,))
 
            log.info('send new password mail to %s' % user_email)
 

	
 
        except Exception:
 
            log.error('Failed to update user password')
 
            log.error(traceback.format_exc())
 
            if pre_db:
 
                # we roll back only if the local db stuff fails. If it goes into
 
                # run_task, we're past the rollback state and this wouldn't work then
 
                Session().rollback()
 

	
 
        return True
 

	
 
    def fill_data(self, auth_user, user_id=None, api_key=None, username=None):
 
        """
 
        Fetches auth_user by user_id, or api_key if present.
 
        Fills auth_user attributes with those taken from database.
 
        Additionally set's is_authenitated if lookup fails
 
        Additionally sets is_authenitated if lookup fails
 
        present in database
 

	
 
        :param auth_user: instance of user to set attributes
 
        :param user_id: user id to fetch by
 
        :param api_key: api key to fetch by
 
        :param username: username to fetch by
 
        """
 
        if user_id is None and api_key is None and username is None:
 
            raise Exception('You need to pass user_id, api_key or username')
 

	
 
        try:
 
            dbuser = None
 
            if user_id:
 
                dbuser = self.get(user_id)
 
            elif api_key:
 
                dbuser = self.get_by_api_key(api_key)
 
            elif username:
 
                dbuser = self.get_by_username(username)
 

	
 
            if dbuser is not None and dbuser.active:
 
                log.debug('filling %s data' % dbuser)
 
                for k, v in dbuser.get_dict().iteritems():
 
                    if k not in ['api_keys', 'permissions']:
 
                        setattr(auth_user, k, v)
 
            else:
 
                return False
 

	
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            auth_user.is_authenticated = False
 
            return False
 

	
 
        return True
 

	
 
    def has_perm(self, user, perm):
 
        perm = self._get_perm(perm)
 
        user = self._get_user(user)
 

	
 
        return UserToPerm.query().filter(UserToPerm.user == user)\
 
            .filter(UserToPerm.permission == perm).scalar() is not None
 

	
 
    def grant_perm(self, user, perm):
 
        """
 
        Grant user global permissions
 

	
 
        :param user:
 
        :param perm:
 
        """
 
        user = self._get_user(user)
 
        perm = self._get_perm(perm)
 
        # if this permission is already granted skip it
 
        _perm = UserToPerm.query()\
 
            .filter(UserToPerm.user == user)\
 
            .filter(UserToPerm.permission == perm)\
 
            .scalar()
 
        if _perm:
 
            return
 
        new = UserToPerm()
 
        new.user = user
 
        new.permission = perm
 
        self.sa.add(new)
 
        return new
 

	
 
    def revoke_perm(self, user, perm):
 
        """
 
        Revoke users global permissions
 

	
 
        :param user:
 
        :param perm:
 
        """
 
        user = self._get_user(user)
 
        perm = self._get_perm(perm)
 

	
 
        obj = UserToPerm.query()\
 
                .filter(UserToPerm.user == user)\
 
                .filter(UserToPerm.permission == perm)\
 
                .scalar()
 
        if obj:
 
            self.sa.delete(obj)
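
A sketch of the global-permission helpers (illustrative only; the username and the permission key are placeholders and are assumed to be accepted as plain strings by _get_user/_get_perm, as elsewhere in Kallithea):

from kallithea.model.meta import Session
from kallithea.model.user import UserModel

um = UserModel()
um.grant_perm('jdoe', 'hg.create.repository')      # example user / permission key
assert um.has_perm('jdoe', 'hg.create.repository')
um.revoke_perm('jdoe', 'hg.create.repository')
Session().commit()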
 

	
 
    def add_extra_email(self, user, email):
 
        """
 
        Adds email address to UserEmailMap
 

	
 
        :param user:
 
        :param email:
 
        """
 
        from kallithea.model import forms
 
        form = forms.UserExtraEmailForm()()
 
        data = form.to_python(dict(email=email))
 
        user = self._get_user(user)
 

	
 
        obj = UserEmailMap()
 
        obj.user = user
 
        obj.email = data['email']
 
        self.sa.add(obj)
 
        return obj
 

	
 
    def delete_extra_email(self, user, email_id):
 
        """
 
        Removes email address from UserEmailMap
 

	
 
        :param user:
 
        :param email_id:
 
        """
 
        user = self._get_user(user)
 
        obj = UserEmailMap.query().get(email_id)
 
        if obj:
 
            self.sa.delete(obj)
 

	
 
    def add_extra_ip(self, user, ip):
 
        """
 
        Adds ip address to UserIpMap
 

	
 
        :param user:
 
        :param ip:
 
        """
 
        from kallithea.model import forms
 
        form = forms.UserExtraIpForm()()
 
        data = form.to_python(dict(ip=ip))
 
        user = self._get_user(user)
 

	
 
        obj = UserIpMap()
 
        obj.user = user
 
        obj.ip_addr = data['ip']
 
        self.sa.add(obj)
 
        return obj
 

	
 
    def delete_extra_ip(self, user, ip_id):
 
        """
 
        Removes ip address from UserIpMap
 

	
 
        :param user:
 
        :param ip_id:
 
        """
 
        user = self._get_user(user)
 
        obj = UserIpMap.query().get(ip_id)
 
        if obj:
 
            self.sa.delete(obj)
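
Finally, a sketch of the extra e-mail / IP helpers (illustrative only; user and values are placeholders, and both inputs are validated by the corresponding formencode forms before the mapping rows are added):

from kallithea.model.meta import Session
from kallithea.model.user import UserModel

um = UserModel()
um.add_extra_email('jdoe', 'jdoe.alt@example.com')   # placeholder address
um.add_extra_ip('jdoe', '192.168.1.1')               # placeholder IP
Session().commit()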