Changeset - 238486bb71ab
[Not reviewed]
beta
Marcin Kuzminski <marcin@python-works.com> - 2013-03-24 03:07:04
Switched handling of RhodeCode extra params to a consistent way
- use only os.environ for both hg and git
- hacking ui objects was bad
- os.environ makes it easier to call rhodecode hooks externally
9 files changed with 74 insertions and 160 deletions:
0 comments (0 inline, 0 general)
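The new flow keeps the extras in the process environment only: the middleware serializes them once, and every hook (hg in-process, git via subprocess) reads them back. The _set_extras/_extract_extras helpers referenced in the hunks below are imported from rhodecode.lib.utils2, but their bodies are not part of the hunks shown here, so the following is only a minimal sketch of how such a pair could look, inferred from the way the hooks use the result (ex.username, ex.locked_by, ...); only the RC_SCM_DATA key is taken from the code in this changeset.

import os
import json


class _AttributeDict(dict):
    # assumed convenience wrapper so hooks can write ex.username, ex.scm, ...
    def __getattr__(self, attr):
        try:
            return self[attr]
        except KeyError:
            raise AttributeError(attr)


def _set_extras(extras):
    # publish the extras for this request; both in-process hg hooks and
    # externally spawned git hook processes inherit os.environ
    os.environ['RC_SCM_DATA'] = json.dumps(extras)


def _extract_extras():
    # read the extras back; failing loudly mirrors the old
    # "Missing data in repo.ui and os.environ" behaviour
    try:
        extras = json.loads(os.environ['RC_SCM_DATA'])
    except KeyError:
        raise Exception('Missing data in os.environ')
    return _AttributeDict(extras)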
rhodecode/lib/hooks.py
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.hooks
 
    ~~~~~~~~~~~~~~~~~~~
 

	
 
    Hooks run by rhodecode
 

	
 
    :created_on: Aug 6, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
import os
 
import sys
 
import time
 
import binascii
 
import traceback
 
from inspect import isfunction
 

	
 
from mercurial.scmutil import revrange
 
from mercurial.node import nullrev
 

	
 
from rhodecode.lib import helpers as h
 
from rhodecode.lib.utils import action_logger
 
from rhodecode.lib.vcs.backends.base import EmptyChangeset
 
from rhodecode.lib.compat import json
 
from rhodecode.lib.exceptions import HTTPLockedRC
 
-from rhodecode.lib.utils2 import safe_str
+from rhodecode.lib.utils2 import safe_str, _extract_extras
 
from rhodecode.model.db import Repository, User
 

	
 

	
 
def _get_scm_size(alias, root_path):
 

	
 
    if not alias.startswith('.'):
 
        alias += '.'
 

	
 
    size_scm, size_root = 0, 0
 
    for path, dirs, files in os.walk(safe_str(root_path)):
 
        if path.find(alias) != -1:
 
            for f in files:
 
                try:
 
                    size_scm += os.path.getsize(os.path.join(path, f))
 
                except OSError:
 
                    pass
 
        else:
 
            for f in files:
 
                try:
 
                    size_root += os.path.getsize(os.path.join(path, f))
 
                except OSError:
 
                    pass
 

	
 
    size_scm_f = h.format_byte_size(size_scm)
 
    size_root_f = h.format_byte_size(size_root)
 
    size_total_f = h.format_byte_size(size_root + size_scm)
 

	
 
    return size_scm_f, size_root_f, size_total_f
 

	
 

	
 
def repo_size(ui, repo, hooktype=None, **kwargs):
 
    """
 
    Presents size of repository after push
 

	
 
    :param ui:
 
    :param repo:
 
    :param hooktype:
 
    """
 

	
 
    size_hg_f, size_root_f, size_total_f = _get_scm_size('.hg', repo.root)
 

	
 
    last_cs = repo[len(repo) - 1]
 

	
 
    msg = ('Repository size .hg:%s repo:%s total:%s\n'
 
           'Last revision is now r%s:%s\n') % (
 
        size_hg_f, size_root_f, size_total_f, last_cs.rev(), last_cs.hex()[:12]
 
    )
 

	
 
    sys.stdout.write(msg)
 

	
 

	
 
def pre_push(ui, repo, **kwargs):
 
    # pre push function, currently used to ban pushing when
 
    # repository is locked
 
-    try:
-        rc_extras = json.loads(os.environ.get('RC_SCM_DATA', "{}"))
-    except:
-        rc_extras = {}
-    extras = dict(repo.ui.configitems('rhodecode_extras'))
+    ex = _extract_extras()
 

	
 
-    if 'username' in extras:
-        username = extras['username']
-        repository = extras['repository']
-        scm = extras['scm']
-        locked_by = extras['locked_by']
-    elif 'username' in rc_extras:
-        username = rc_extras['username']
-        repository = rc_extras['repository']
-        scm = rc_extras['scm']
-        locked_by = rc_extras['locked_by']
-    else:
-        raise Exception('Missing data in repo.ui and os.environ')
 

	
 
-    usr = User.get_by_username(username)
-    if locked_by[0] and usr.user_id != int(locked_by[0]):
-        locked_by = User.get(locked_by[0]).username
+    usr = User.get_by_username(ex.username)
+    if ex.locked_by[0] and usr.user_id != int(ex.locked_by[0]):
+        locked_by = User.get(ex.locked_by[0]).username
 
        # this exception is interpreted in git/hg middlewares and based
        # on that, a proper return code is served to the client
 
-        _http_ret = HTTPLockedRC(repository, locked_by)
+        _http_ret = HTTPLockedRC(ex.repository, locked_by)
 
        if str(_http_ret.code).startswith('2'):
 
            #2xx Codes don't raise exceptions
 
            sys.stdout.write(_http_ret.title)
 
        else:
 
            raise _http_ret
 

	
 

	
 
def pre_pull(ui, repo, **kwargs):
 
    # pre pull function, currently used to ban pulling when
    # repository is locked
 
-    try:
-        rc_extras = json.loads(os.environ.get('RC_SCM_DATA', "{}"))
-    except:
-        rc_extras = {}
-    extras = dict(repo.ui.configitems('rhodecode_extras'))
-    if 'username' in extras:
-        username = extras['username']
-        repository = extras['repository']
-        scm = extras['scm']
-        locked_by = extras['locked_by']
-    elif 'username' in rc_extras:
-        username = rc_extras['username']
-        repository = rc_extras['repository']
-        scm = rc_extras['scm']
-        locked_by = rc_extras['locked_by']
-    else:
-        raise Exception('Missing data in repo.ui and os.environ')
 

	
 
-    if locked_by[0]:
-        locked_by = User.get(locked_by[0]).username
+    ex = _extract_extras()
+    if ex.locked_by[0]:
+        locked_by = User.get(ex.locked_by[0]).username
 
        # this exception is interpreted in git/hg middlewares and based
        # on that, a proper return code is served to the client
 
-        _http_ret = HTTPLockedRC(repository, locked_by)
+        _http_ret = HTTPLockedRC(ex.repository, locked_by)
 
        if str(_http_ret.code).startswith('2'):
 
            #2xx Codes don't raise exceptions
 
            sys.stdout.write(_http_ret.title)
 
        else:
 
            raise _http_ret
 

	
 

	
 
def log_pull_action(ui, repo, **kwargs):
 
    """
 
    Logs user last pull action
 

	
 
    :param ui:
 
    :param repo:
 
    """
 
-    try:
-        rc_extras = json.loads(os.environ.get('RC_SCM_DATA', "{}"))
-    except:
-        rc_extras = {}
-    extras = dict(repo.ui.configitems('rhodecode_extras'))
 
-    if 'username' in extras:
-        username = extras['username']
-        repository = extras['repository']
-        scm = extras['scm']
-        make_lock = extras['make_lock']
-        locked_by = extras['locked_by']
-        ip = extras['ip']
-    elif 'username' in rc_extras:
-        username = rc_extras['username']
-        repository = rc_extras['repository']
-        scm = rc_extras['scm']
-        make_lock = rc_extras['make_lock']
-        locked_by = rc_extras['locked_by']
-        ip = rc_extras['ip']
-    else:
-        raise Exception('Missing data in repo.ui and os.environ')
 
-    user = User.get_by_username(username)
+    ex = _extract_extras()

+    user = User.get_by_username(ex.username)
 
    action = 'pull'
 
-    action_logger(user, action, repository, ip, commit=True)
+    action_logger(user, action, ex.repository, ex.ip, commit=True)
 
    # extension hook call
 
    from rhodecode import EXTENSIONS
 
    callback = getattr(EXTENSIONS, 'PULL_HOOK', None)
 

	
 
    if isfunction(callback):
 
        kw = {}
 
-        kw.update(extras)
+        kw.update(ex)
 
        callback(**kw)
 

	
 
-    if make_lock is True:
-        Repository.lock(Repository.get_by_repo_name(repository), user.user_id)
+    if ex.make_lock is True:
+        Repository.lock(Repository.get_by_repo_name(ex.repository), user.user_id)
 
        #msg = 'Made lock on repo `%s`' % repository
 
        #sys.stdout.write(msg)
 

	
 
-    if locked_by[0]:
-        locked_by = User.get(locked_by[0]).username
-        _http_ret = HTTPLockedRC(repository, locked_by)
+    if ex.locked_by[0]:
+        locked_by = User.get(ex.locked_by[0]).username
+        _http_ret = HTTPLockedRC(ex.repository, locked_by)
 
        if str(_http_ret.code).startswith('2'):
 
            #2xx Codes don't raise exceptions
 
            sys.stdout.write(_http_ret.title)
 
    return 0
 

	
 

	
 
def log_push_action(ui, repo, **kwargs):
 
    """
 
    Maps user last push action to new changeset id, from mercurial
 

	
 
    :param ui:
 
    :param repo: repo object containing the `ui` object
 
    """
 

	
 
-    try:
-        rc_extras = json.loads(os.environ.get('RC_SCM_DATA', "{}"))
-    except:
-        rc_extras = {}
+    ex = _extract_extras()
 

	
 
-    extras = dict(repo.ui.configitems('rhodecode_extras'))
-    if 'username' in extras:
-        username = extras['username']
-        repository = extras['repository']
-        scm = extras['scm']
-        make_lock = extras['make_lock']
-        locked_by = extras['locked_by']
-        action = extras['action']
-    elif 'username' in rc_extras:
-        username = rc_extras['username']
-        repository = rc_extras['repository']
-        scm = rc_extras['scm']
-        make_lock = rc_extras['make_lock']
-        locked_by = rc_extras['locked_by']
-        action = extras['action']
-    else:
-        raise Exception('Missing data in repo.ui and os.environ')
 
-    action = action + ':%s'
+    action = ex.action + ':%s'
 

	
 
-    if scm == 'hg':
+    if ex.scm == 'hg':
 
        node = kwargs['node']
 

	
 
        def get_revs(repo, rev_opt):
 
            if rev_opt:
 
                revs = revrange(repo, rev_opt)
 

	
 
                if len(revs) == 0:
 
                    return (nullrev, nullrev)
 
                return (max(revs), min(revs))
 
            else:
 
                return (len(repo) - 1, 0)
 

	
 
        stop, start = get_revs(repo, [node + ':'])
 
        h = binascii.hexlify
 
        revs = [h(repo[r].node()) for r in xrange(start, stop + 1)]
 
-    elif scm == 'git':
+    elif ex.scm == 'git':
 
        revs = kwargs.get('_git_revs', [])
 
        if '_git_revs' in kwargs:
 
            kwargs.pop('_git_revs')
 

	
 
    action = action % ','.join(revs)
 

	
 
-    action_logger(username, action, repository, extras['ip'], commit=True)
+    action_logger(ex.username, action, ex.repository, ex.ip, commit=True)
 

	
 
    # extension hook call
 
    from rhodecode import EXTENSIONS
 
    callback = getattr(EXTENSIONS, 'PUSH_HOOK', None)
 
    if isfunction(callback):
 
        kw = {'pushed_revs': revs}
 
-        kw.update(extras)
+        kw.update(ex)
 
        callback(**kw)
 

	
 
-    if make_lock is False:
-        Repository.unlock(Repository.get_by_repo_name(repository))
-        msg = 'Released lock on repo `%s`\n' % repository
+    if ex.make_lock is False:
+        Repository.unlock(Repository.get_by_repo_name(ex.repository))
+        msg = 'Released lock on repo `%s`\n' % ex.repository
 
        sys.stdout.write(msg)
 

	
 
-    if locked_by[0]:
-        locked_by = User.get(locked_by[0]).username
-        _http_ret = HTTPLockedRC(repository, locked_by)
+    if ex.locked_by[0]:
+        locked_by = User.get(ex.locked_by[0]).username
+        _http_ret = HTTPLockedRC(ex.repository, locked_by)
 
        if str(_http_ret.code).startswith('2'):
 
            #2xx Codes don't raise exceptions
 
            sys.stdout.write(_http_ret.title)
 

	
 
    return 0
 

	
 

	
 
def log_create_repository(repository_dict, created_by, **kwargs):
 
    """
 
    Post create repository Hook. This is a dummy function for admins to re-use
 
    if needed. It's taken from rhodecode-extensions module and executed
 
    if present
 

	
 
    :param repository: dict dump of repository object
 
    :param created_by: username who created repository
 

	
 
    available keys of repository_dict:
 

	
 
     'repo_type',
 
     'description',
 
     'private',
 
     'created_on',
 
     'enable_downloads',
 
     'repo_id',
 
     'user_id',
 
     'enable_statistics',
 
     'clone_uri',
 
     'fork_id',
 
     'group_id',
 
     'repo_name'
 

	
 
    """
 
    from rhodecode import EXTENSIONS
 
    callback = getattr(EXTENSIONS, 'CREATE_REPO_HOOK', None)
 
    if isfunction(callback):
 
        kw = {}
 
        kw.update(repository_dict)
 
        kw.update({'created_by': created_by})
 
        kw.update(kwargs)
 
        return callback(**kw)
 

	
 
    return 0
 

	
 

	
 
def log_delete_repository(repository_dict, deleted_by, **kwargs):
 
    """
 
    Post delete repository Hook. This is a dummy function for admins to re-use
 
    if needed. It's taken from rhodecode-extensions module and executed
 
    if present
 

	
 
    :param repository: dict dump of repository object
 
    :param deleted_by: username who deleted the repository
 

	
 
    available keys of repository_dict:
 

	
 
     'repo_type',
 
     'description',
 
     'private',
 
     'created_on',
 
     'enable_downloads',
 
     'repo_id',
 
     'user_id',
 
     'enable_statistics',
 
     'clone_uri',
 
     'fork_id',
 
     'group_id',
 
     'repo_name'
 

	
 
    """
 
    from rhodecode import EXTENSIONS
 
    callback = getattr(EXTENSIONS, 'DELETE_REPO_HOOK', None)
 
    if isfunction(callback):
 
        kw = {}
 
        kw.update(repository_dict)
 
        kw.update({'deleted_by': deleted_by,
 
                   'deleted_on': time.time()})
 
        kw.update(kwargs)
 
        return callback(**kw)
 

	
 
    return 0
 

	
 

	
 
handle_git_pre_receive = (lambda repo_path, revs, env:
 
    handle_git_receive(repo_path, revs, env, hook_type='pre'))
 
handle_git_post_receive = (lambda repo_path, revs, env:
 
    handle_git_receive(repo_path, revs, env, hook_type='post'))
 

	
 

	
 
def handle_git_receive(repo_path, revs, env, hook_type='post'):
 
    """
 
    A really hacky method that is run by the git post-receive hook and logs
    a push action together with the pushed revisions. It's executed by a
    subprocess, thus it needs all the info to be able to create an on-the-fly
    Pylons environment, connect to the database and run the logging code.
    Hacky as sh*t, but works.
 

	
 
    :param repo_path:
 
    :type repo_path:
 
    :param revs:
 
    :type revs:
 
    :param env:
 
    :type env:
 
    """
 
    from paste.deploy import appconfig
 
    from sqlalchemy import engine_from_config
 
    from rhodecode.config.environment import load_environment
 
    from rhodecode.model import init_model
 
    from rhodecode.model.db import RhodeCodeUi
 
    from rhodecode.lib.utils import make_ui
 
    extras = json.loads(env['RHODECODE_EXTRAS'])
 

	
 
    path, ini_name = os.path.split(extras['config'])
 
    conf = appconfig('config:%s' % ini_name, relative_to=path)
 
    load_environment(conf.global_conf, conf.local_conf)
 

	
 
    engine = engine_from_config(conf, 'sqlalchemy.db1.')
 
    init_model(engine)
 

	
 
    baseui = make_ui('db')
 
    # fix if it's not a bare repo
 
    if repo_path.endswith(os.sep + '.git'):
 
        repo_path = repo_path[:-5]
 

	
 
    repo = Repository.get_by_full_path(repo_path)
 
    if not repo:
 
        raise OSError('Repository %s not found in database'
 
                      % (safe_str(repo_path)))
 

	
 
    _hooks = dict(baseui.configitems('hooks')) or {}
 

	
 
    for k, v in extras.items():
 
        baseui.setconfig('rhodecode_extras', k, v)
 
    if hook_type == 'pre':
 
        repo = repo.scm_instance
 
    else:
 
        #post push should never use the cached instance
 
        repo = repo.scm_instance_no_cache()
 

	
 
    repo.ui = baseui
 

	
 
    if hook_type == 'pre':
 
        pre_push(baseui, repo)
 

	
 
    # if push hook is enabled via web interface
 
    elif hook_type == 'post' and _hooks.get(RhodeCodeUi.HOOK_PUSH):
 

	
 
        rev_data = []
 
        for l in revs:
 
            old_rev, new_rev, ref = l.split(' ')
 
            _ref_data = ref.split('/')
 
            if _ref_data[1] in ['tags', 'heads']:
 
                rev_data.append({'old_rev': old_rev,
 
                                 'new_rev': new_rev,
 
                                 'ref': ref,
 
                                 'type': _ref_data[1],
 
                                 'name': _ref_data[2].strip()})
 

	
 
        git_revs = []
 
        for push_ref  in rev_data:
 
            _type = push_ref['type']
 
            if _type == 'heads':
 
                if push_ref['old_rev'] == EmptyChangeset().raw_id:
 
                    cmd = "for-each-ref --format='%(refname)' 'refs/heads/*'"
 
                    heads = repo.run_git_command(cmd)[0]
 
                    heads = heads.replace(push_ref['ref'], '')
 
                    heads = ' '.join(map(lambda c: c.strip('\n').strip(),
 
                                         heads.splitlines()))
 
                    cmd = (('log %(new_rev)s' % push_ref) +
 
                           ' --reverse --pretty=format:"%H" --not ' + heads)
 
                    git_revs += repo.run_git_command(cmd)[0].splitlines()
 

	
 
                elif push_ref['new_rev'] == EmptyChangeset().raw_id:
 
                    #delete branch case
 
                    git_revs += ['delete_branch=>%s' % push_ref['name']]
 
                else:
 
                    cmd = (('log %(old_rev)s..%(new_rev)s' % push_ref) +
 
                           ' --reverse --pretty=format:"%H"')
 
                    git_revs += repo.run_git_command(cmd)[0].splitlines()
 

	
 
            elif _type == 'tags':
 
                git_revs += ['tag=>%s' % push_ref['name']]
 

	
 
        log_push_action(baseui, repo, _git_revs=git_revs)
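Both log_pull_action and log_push_action above forward the whole extras payload to the optional rcextensions callbacks via callback(**kw), with pushes also passing the collected revisions as pushed_revs. The callback below is only an illustrative consumer written for this note, not shipped RhodeCode code; its parameter names mirror the extras keys built by the middleware.

import logging

log = logging.getLogger('rcextensions')


def push_hook(username=None, repository=None, scm=None, ip=None,
              pushed_revs=None, **kwargs):
    # kwargs carries the remaining extras (action, server_url, config,
    # make_lock, locked_by, ...) so the signature stays forward compatible
    log.info('%s pushed %s commit(s) to %s [%s] from %s',
             username, len(pushed_revs or []), repository, scm, ip)
    return 0

# in an rcextensions module this would presumably be exposed as
# PUSH_HOOK = push_hook, which is what getattr(EXTENSIONS, 'PUSH_HOOK') finds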
rhodecode/lib/middleware/pygrack.py
 
import os
 
import socket
 
import logging
 
import subprocess
 
import traceback
 

	
 
from webob import Request, Response, exc
 

	
 
import rhodecode
 
from rhodecode.lib import subprocessio
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class FileWrapper(object):
 

	
 
    def __init__(self, fd, content_length):
 
        self.fd = fd
 
        self.content_length = content_length
 
        self.remain = content_length
 

	
 
    def read(self, size):
 
        if size <= self.remain:
 
            try:
 
                data = self.fd.read(size)
 
            except socket.error:
 
                raise IOError(self)
 
            self.remain -= size
 
        elif self.remain:
 
            data = self.fd.read(self.remain)
 
            self.remain = 0
 
        else:
 
            data = None
 
        return data
 

	
 
    def __repr__(self):
 
        return '<FileWrapper %s len: %s, read: %s>' % (
 
            self.fd, self.content_length, self.content_length - self.remain
 
        )
 

	
 

	
 
class GitRepository(object):
 
    git_folder_signature = set(['config', 'head', 'info', 'objects', 'refs'])
 
    commands = ['git-upload-pack', 'git-receive-pack']
 

	
 
    def __init__(self, repo_name, content_path, extras):
 
        files = set([f.lower() for f in os.listdir(content_path)])
 
        if  not (self.git_folder_signature.intersection(files)
 
                == self.git_folder_signature):
 
            raise OSError('%s missing git signature' % content_path)
 
        self.content_path = content_path
 
        self.valid_accepts = ['application/x-%s-result' %
 
                              c for c in self.commands]
 
        self.repo_name = repo_name
 
        self.extras = extras
 

	
 
    def _get_fixedpath(self, path):
 
        """
 
        Small fix for repo_path
 

	
 
        :param path:
 
        :type path:
 
        """
 
        return path.split(self.repo_name, 1)[-1].strip('/')
 

	
 
    def inforefs(self, request, environ):
 
        """
 
        WSGI Response producer for HTTP GET Git Smart
 
        HTTP /info/refs request.
 
        """
 

	
 
        git_command = request.GET.get('service')
 
        if git_command not in self.commands:
 
            log.debug('command %s not allowed' % git_command)
 
            return exc.HTTPMethodNotAllowed()
 

	
 
        # note to self:
 
        # please, resist the urge to add '\n' to git capture and increment
 
        # line count by 1.
 
        # The code in Git client not only does NOT need '\n', but actually
 
        # blows up if you sprinkle "flush" (0000) as "0001\n".
 
        # It reads binary, per number of bytes specified.
 
        # if you do add '\n' as part of data, count it.
 
        server_advert = '# service=%s' % git_command
 
        packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
 
        _git_path = rhodecode.CONFIG.get('git_path', 'git')
 
        try:
 
            out = subprocessio.SubprocessIOChunker(
 
                r'%s %s --stateless-rpc --advertise-refs "%s"' % (
 
                            _git_path, git_command[4:], self.content_path),
 
                starting_values=[
 
                    packet_len + server_advert + '0000'
 
                ]
 
            )
 
        except EnvironmentError, e:
 
            log.error(traceback.format_exc())
 
            raise exc.HTTPExpectationFailed()
 
        resp = Response()
 
        resp.content_type = 'application/x-%s-advertisement' % str(git_command)
 
        resp.charset = None
 
        resp.app_iter = out
 
        return resp
 

	
 
    def backend(self, request, environ):
 
        """
 
        WSGI Response producer for HTTP POST Git Smart HTTP requests.
 
        Reads commands and data from HTTP POST's body.
 
        returns an iterator obj with contents of git command's
 
        response to stdout
 
        """
 
        git_command = self._get_fixedpath(request.path_info)
 
        if git_command not in self.commands:
 
            log.debug('command %s not allowed' % git_command)
 
            return exc.HTTPMethodNotAllowed()
 

	
 
        if 'CONTENT_LENGTH' in environ:
 
            inputstream = FileWrapper(environ['wsgi.input'],
 
                                      request.content_length)
 
        else:
 
            inputstream = environ['wsgi.input']
 

	
 
        try:
 
            gitenv = os.environ
 
            from rhodecode.lib.compat import json
 
            gitenv['RHODECODE_EXTRAS'] = json.dumps(self.extras)
 
            # forget all configs
 
            gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
 
            opts = dict(
 
                env=gitenv,
 
                cwd=os.getcwd()
 
            )
 
            cmd = r'git %s --stateless-rpc "%s"' % (git_command[4:],
 
                                                    self.content_path),
 
            log.debug('handling cmd %s' % cmd)
 
            out = subprocessio.SubprocessIOChunker(
 
                cmd,
 
                inputstream=inputstream,
 
                **opts
 
            )
 
        except EnvironmentError, e:
 
            log.error(traceback.format_exc())
 
            raise exc.HTTPExpectationFailed()
 

	
 
        if git_command in [u'git-receive-pack']:
 
            # updating refs manually after each push.
 
            # Needed for pre-1.7.0.4 git clients using regular HTTP mode.
 
            _git_path = rhodecode.CONFIG.get('git_path', 'git')
 
            cmd = (u'%s --git-dir "%s" '
 
                    'update-server-info' % (_git_path, self.content_path))
 
            log.debug('handling cmd %s' % cmd)
 
            subprocess.call(cmd, shell=True)
 

	
 
        resp = Response()
 
        resp.content_type = 'application/x-%s-result' % git_command.encode('utf8')
 
        resp.charset = None
 
        resp.app_iter = out
 
        return resp
 

	
 
    def __call__(self, environ, start_response):
 
        request = Request(environ)
 
        _path = self._get_fixedpath(request.path_info)
 
        if _path.startswith('info/refs'):
 
            app = self.inforefs
 
        elif [a for a in self.valid_accepts if a in request.accept]:
 
            app = self.backend
 
        try:
 
            resp = app(request, environ)
 
        except exc.HTTPException, e:
 
            resp = e
 
            log.error(traceback.format_exc())
 
        except Exception, e:
 
            log.error(traceback.format_exc())
 
            resp = exc.HTTPInternalServerError()
 
        return resp(environ, start_response)
 

	
 

	
 
class GitDirectory(object):
 

	
 
    def __init__(self, repo_root, repo_name, extras):
 
        repo_location = os.path.join(repo_root, repo_name)
 
        if not os.path.isdir(repo_location):
 
            raise OSError(repo_location)
 

	
 
        self.content_path = repo_location
 
        self.repo_name = repo_name
 
        self.repo_location = repo_location
 
        self.extras = extras
 

	
 
    def __call__(self, environ, start_response):
 
        content_path = self.content_path
 
        try:
 
            app = GitRepository(self.repo_name, content_path, self.extras)
 
        except (AssertionError, OSError):
 
            content_path = os.path.join(content_path, '.git')
 
            if os.path.isdir(content_path):
 
                app = GitRepository(self.repo_name, content_path, self.extras)
 
            else:
 
                return exc.HTTPNotFound()(environ, start_response)
 
        return app(environ, start_response)
 

	
 

	
 
def make_wsgi_app(repo_name, repo_root, extras):
 
    return GitDirectory(repo_root, repo_name, extras)
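On the git side, GitRepository.backend above serializes the same extras into the subprocess environment as RHODECODE_EXTRAS, and the post-receive hook that git executes hands the repository path, the rev lines from stdin and that environment to handle_git_post_receive in rhodecode.lib.hooks. The hook script RhodeCode actually installs is not part of this changeset, so the snippet below is only a hypothetical illustration of that calling convention.

#!/usr/bin/env python
# hypothetical post-receive hook; shows only the calling convention that
# handle_git_post_receive(repo_path, revs, env) expects
import os
import sys

from rhodecode.lib.hooks import handle_git_post_receive

if __name__ == '__main__':
    repo_path = os.path.abspath(os.path.curdir)
    # git feeds one "old_rev new_rev ref" line per updated ref on stdin
    rev_lines = [line.strip() for line in sys.stdin if line.strip()]
    # os.environ still carries RHODECODE_EXTRAS set by pygrack for this push
    sys.exit(handle_git_post_receive(repo_path, rev_lines, os.environ) or 0)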
rhodecode/lib/middleware/simplegit.py
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.middleware.simplegit
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    SimpleGit middleware for handling git protocol request (push/clone etc.)
 
    It's implemented with basic auth function
 

	
 
    :created_on: Apr 28, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import os
 
import re
 
import logging
 
import traceback
 

	
 
from dulwich import server as dulserver
 
from dulwich.web import LimitedInputFilter, GunzipFilter
 
from rhodecode.lib.exceptions import HTTPLockedRC
 
from rhodecode.lib.hooks import pre_pull
 

	
 

	
 
class SimpleGitUploadPackHandler(dulserver.UploadPackHandler):
 

	
 
    def handle(self):
 
        write = lambda x: self.proto.write_sideband(1, x)
 

	
 
        graph_walker = dulserver.ProtocolGraphWalker(self,
 
                                                     self.repo.object_store,
 
                                                     self.repo.get_peeled)
 
        objects_iter = self.repo.fetch_objects(
 
          graph_walker.determine_wants, graph_walker, self.progress,
 
          get_tagged=self.get_tagged)
 

	
 
        # Did the process short-circuit (e.g. in a stateless RPC call)? Note
 
        # that the client still expects a 0-object pack in most cases.
 
        if objects_iter is None:
 
            return
 

	
 
        self.progress("counting objects: %d, done.\n" % len(objects_iter))
 
        dulserver.write_pack_objects(dulserver.ProtocolFile(None, write),
 
                                     objects_iter)
 
        messages = []
 
        messages.append('thank you for using rhodecode')
 

	
 
        for msg in messages:
 
            self.progress(msg + "\n")
 
        # we are done
 
        self.proto.write("0000")
 

	
 

	
 
dulserver.DEFAULT_HANDLERS = {
 
  #git-ls-remote, git-clone, git-fetch and git-pull
 
  'git-upload-pack': SimpleGitUploadPackHandler,
 
  #git-push
 
  'git-receive-pack': dulserver.ReceivePackHandler,
 
}
 

	
 
# not used for now until dulwich gets fixed
 
#from dulwich.repo import Repo
 
#from dulwich.web import make_wsgi_chain
 

	
 
from paste.httpheaders import REMOTE_USER, AUTH_TYPE
 
from webob.exc import HTTPNotFound, HTTPForbidden, HTTPInternalServerError, \
 
    HTTPBadRequest, HTTPNotAcceptable
 

	
 
-from rhodecode.lib.utils2 import safe_str, fix_PATH, get_server_url
+from rhodecode.lib.utils2 import safe_str, fix_PATH, get_server_url,\
+    _set_extras
 
from rhodecode.lib.base import BaseVCSController
 
from rhodecode.lib.auth import get_container_username
 
from rhodecode.lib.utils import is_valid_repo, make_ui
 
from rhodecode.lib.compat import json
 
from rhodecode.model.db import User, RhodeCodeUi
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
GIT_PROTO_PAT = re.compile(r'^/(.+)/(info/refs|git-upload-pack|git-receive-pack)')
 

	
 

	
 
def is_git(environ):
 
    path_info = environ['PATH_INFO']
 
    isgit_path = GIT_PROTO_PAT.match(path_info)
 
    log.debug('pathinfo: %s detected as GIT %s' % (
 
        path_info, isgit_path != None)
 
    )
 
    return isgit_path
 

	
 

	
 
class SimpleGit(BaseVCSController):
 

	
 
    def _handle_request(self, environ, start_response):
 
        if not is_git(environ):
 
            return self.application(environ, start_response)
 
        if not self._check_ssl(environ, start_response):
 
            return HTTPNotAcceptable('SSL REQUIRED !')(environ, start_response)
 

	
 
        ip_addr = self._get_ip_addr(environ)
 
        username = None
 
        self._git_first_op = False
 
        # skip passing error to error controller
 
        environ['pylons.status_code_redirect'] = True
 

	
 
        #======================================================================
 
        # EXTRACT REPOSITORY NAME FROM ENV
 
        #======================================================================
 
        try:
 
            repo_name = self.__get_repository(environ)
 
            log.debug('Extracted repo name is %s' % repo_name)
 
        except:
 
            return HTTPInternalServerError()(environ, start_response)
 

	
 
        # quick check if that dir exists...
 
        if is_valid_repo(repo_name, self.basepath, 'git') is False:
 
            return HTTPNotFound()(environ, start_response)
 

	
 
        #======================================================================
 
        # GET ACTION PULL or PUSH
 
        #======================================================================
 
        action = self.__get_action(environ)
 

	
 
        #======================================================================
 
        # CHECK ANONYMOUS PERMISSION
 
        #======================================================================
 
        if action in ['pull', 'push']:
 
            anonymous_user = self.__get_user('default')
 
            username = anonymous_user.username
 
            anonymous_perm = self._check_permission(action, anonymous_user,
 
                                                    repo_name, ip_addr)
 

	
 
            if anonymous_perm is not True or anonymous_user.active is False:
 
                if anonymous_perm is not True:
 
                    log.debug('Not enough credentials to access this '
 
                              'repository as anonymous user')
 
                if anonymous_user.active is False:
 
                    log.debug('Anonymous access is disabled, running '
 
                              'authentication')
 
                #==============================================================
 
                # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
 
                # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
 
                #==============================================================
 

	
 
                # Attempting to retrieve username from the container
 
                username = get_container_username(environ, self.config)
 

	
 
                # If not authenticated by the container, running basic auth
 
                if not username:
 
                    self.authenticate.realm = \
 
                        safe_str(self.config['rhodecode_realm'])
 
                    result = self.authenticate(environ)
 
                    if isinstance(result, str):
 
                        AUTH_TYPE.update(environ, 'basic')
 
                        REMOTE_USER.update(environ, result)
 
                        username = result
 
                    else:
 
                        return result.wsgi_application(environ, start_response)
 

	
 
                #==============================================================
 
                # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
 
                #==============================================================
 
                try:
 
                    user = self.__get_user(username)
 
                    if user is None or not user.active:
 
                        return HTTPForbidden()(environ, start_response)
 
                    username = user.username
 
                except:
 
                    log.error(traceback.format_exc())
 
                    return HTTPInternalServerError()(environ, start_response)
 

	
 
                #check permissions for this repository
 
                perm = self._check_permission(action, user, repo_name, ip_addr)
 
                if perm is not True:
 
                    return HTTPForbidden()(environ, start_response)
 

	
 
        # extras are injected into UI object and later available
 
        # in hooks executed by rhodecode
 
        from rhodecode import CONFIG
 
        server_url = get_server_url(environ)
 
        extras = {
 
            'ip': ip_addr,
 
            'username': username,
 
            'action': action,
 
            'repository': repo_name,
 
            'scm': 'git',
 
            'config': CONFIG['__file__'],
 
            'server_url': server_url,
 
            'make_lock': None,
 
            'locked_by': [None, None]
 
        }
 

	
 
        #===================================================================
 
        # GIT REQUEST HANDLING
 
        #===================================================================
 
        repo_path = os.path.join(safe_str(self.basepath), safe_str(repo_name))
 
        log.debug('Repository path is %s' % repo_path)
 

	
 
        # CHECK LOCKING only if it's not ANONYMOUS USER
 
        if username != User.DEFAULT_USER:
 
            log.debug('Checking locking on repository')
 
            (make_lock,
 
             locked,
 
             locked_by) = self._check_locking_state(
 
                            environ=environ, action=action,
 
                            repo=repo_name, user_id=user.user_id
 
                       )
 
            # store the make_lock for later evaluation in hooks
 
            extras.update({'make_lock': make_lock,
 
                           'locked_by': locked_by})
 
        # set the environ variables for this request
 
        os.environ['RC_SCM_DATA'] = json.dumps(extras)
 
        fix_PATH()
 
        log.debug('HOOKS extras is %s' % extras)
 
        baseui = make_ui('db')
 
        self.__inject_extras(repo_path, baseui, extras)
 

	
 
        try:
 
            self._handle_githooks(repo_name, action, baseui, environ)
 
            log.info('%s action on GIT repo "%s" by "%s" from %s' %
 
                     (action, repo_name, username, ip_addr))
 
            app = self.__make_app(repo_name, repo_path, extras)
 
            return app(environ, start_response)
 
        except HTTPLockedRC, e:
 
            _code = CONFIG.get('lock_ret_code')
 
            log.debug('Repository LOCKED ret code %s!' % (_code))
 
            return e(environ, start_response)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            return HTTPInternalServerError()(environ, start_response)
 
        finally:
 
            # invalidate cache on push
 
            if action == 'push':
 
                self._invalidate_cache(repo_name)
 

	
 
    def __make_app(self, repo_name, repo_path, extras):
 
        """
 
        Make an wsgi application using dulserver
 

	
 
        :param repo_name: name of the repository
 
        :param repo_path: full path to the repository
 
        """
 

	
 
        from rhodecode.lib.middleware.pygrack import make_wsgi_app
 
        app = make_wsgi_app(
 
            repo_root=safe_str(self.basepath),
 
            repo_name=repo_name,
 
            extras=extras,
 
        )
 
        app = GunzipFilter(LimitedInputFilter(app))
 
        return app
 

	
 
    def __get_repository(self, environ):
 
        """
 
        Gets repository name out of PATH_INFO header
 

	
 
        :param environ: environ where PATH_INFO is stored
 
        """
 
        try:
 
            environ['PATH_INFO'] = self._get_by_id(environ['PATH_INFO'])
 
            repo_name = GIT_PROTO_PAT.match(environ['PATH_INFO']).group(1)
 
        except:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
        return repo_name
 

	
 
    def __get_user(self, username):
 
        return User.get_by_username(username)
 

	
 
    def __get_action(self, environ):
 
        """
 
        Maps git request commands into a pull or push command.
 

	
 
        :param environ:
 
        """
 
        service = environ['QUERY_STRING'].split('=')
 

	
 
        if len(service) > 1:
 
            service_cmd = service[1]
 
            mapping = {
 
                'git-receive-pack': 'push',
 
                'git-upload-pack': 'pull',
 
            }
 
            op = mapping[service_cmd]
 
            self._git_stored_op = op
 
            return op
 
        else:
 
            # try to fallback to stored variable as we don't know if the last
 
            # operation is pull/push
 
            op = getattr(self, '_git_stored_op', 'pull')
 
        return op
 

	
 
    def _handle_githooks(self, repo_name, action, baseui, environ):
 
        """
 
        Handles pull action, push is handled by post-receive hook
 
        """
 
        from rhodecode.lib.hooks import log_pull_action
 
        service = environ['QUERY_STRING'].split('=')
 

	
 
        if len(service) < 2:
 
            return
 

	
 
        from rhodecode.model.db import Repository
 
        _repo = Repository.get_by_repo_name(repo_name)
 
        _repo = _repo.scm_instance
 
        _repo._repo.ui = baseui
 

	
 
        _hooks = dict(baseui.configitems('hooks')) or {}
 
        if action == 'pull':
 
            # stupid git, emulate pre-pull hook !
 
            pre_pull(ui=baseui, repo=_repo._repo)
 
        if action == 'pull' and _hooks.get(RhodeCodeUi.HOOK_PULL):
 
            log_pull_action(ui=baseui, repo=_repo._repo)
 

	
 
    def __inject_extras(self, repo_path, baseui, extras={}):
 
        """
 
        Injects some extra params into baseui instance
 

	
 
        :param baseui: baseui instance
 
        :param extras: dict with extra params to put into baseui
 
        """
 

	
 
        # make our hgweb quiet so it doesn't print output
 
        baseui.setconfig('ui', 'quiet', 'true')
 

	
 
        #inject some additional parameters that will be available in ui
 
        #for hooks
 
        for k, v in extras.items():
 
            baseui.setconfig('rhodecode_extras', k, v)
 
        _set_extras(extras)
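Because _handle_request now publishes everything through os.environ['RC_SCM_DATA'] (and _set_extras above), an external script can prime the same variable before calling a hook, which is what the commit message means by making it easier to call rhodecode hooks externally. A hedged sketch with placeholder values; it assumes a Pylons environment and database connection have already been set up the usual way.

import os
import json

# placeholder payload mirroring the extras dict built in _handle_request
extras = {
    'ip': '127.0.0.1',
    'username': 'admin',
    'action': 'push',
    'repository': 'some/repo',
    'scm': 'git',
    'config': '/path/to/production.ini',
    'server_url': 'http://rhodecode.local',
    'make_lock': None,
    'locked_by': [None, None],
}
# prime the environment exactly like SimpleGit/SimpleHg do per request
os.environ['RC_SCM_DATA'] = json.dumps(extras)

# with RC_SCM_DATA set, hooks such as pre_push or log_pull_action can read
# their parameters via _extract_extras() instead of a hacked ui object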
rhodecode/lib/middleware/simplehg.py
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.middleware.simplehg
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    SimpleHG middleware for handling mercurial protocol request
 
    (push/clone etc.). It's implemented with basic auth function
 

	
 
    :created_on: Apr 28, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import os
 
import logging
 
import traceback
 

	
 
from mercurial.error import RepoError
 
from mercurial.hgweb import hgweb_mod
 

	
 
from paste.httpheaders import REMOTE_USER, AUTH_TYPE
 
from webob.exc import HTTPNotFound, HTTPForbidden, HTTPInternalServerError, \
 
    HTTPBadRequest, HTTPNotAcceptable
 

	
 
-from rhodecode.lib.utils2 import safe_str, fix_PATH, get_server_url
+from rhodecode.lib.utils2 import safe_str, fix_PATH, get_server_url,\
+    _set_extras
 
from rhodecode.lib.base import BaseVCSController
 
from rhodecode.lib.auth import get_container_username
 
from rhodecode.lib.utils import make_ui, is_valid_repo, ui_sections
 
from rhodecode.lib.compat import json
 
from rhodecode.model.db import User
 
from rhodecode.lib.exceptions import HTTPLockedRC
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def is_mercurial(environ):
 
    """
 
    Returns True if request's target is mercurial server - header
 
    ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``.
 
    """
 
    http_accept = environ.get('HTTP_ACCEPT')
 
    path_info = environ['PATH_INFO']
 
    if http_accept and http_accept.startswith('application/mercurial'):
 
        ishg_path = True
 
    else:
 
        ishg_path = False
 

	
 
    log.debug('pathinfo: %s detected as HG %s' % (
 
        path_info, ishg_path)
 
    )
 
    return ishg_path
 

	
 

	
 
class SimpleHg(BaseVCSController):
 

	
 
    def _handle_request(self, environ, start_response):
 
        if not is_mercurial(environ):
 
            return self.application(environ, start_response)
 
        if not self._check_ssl(environ, start_response):
 
            return HTTPNotAcceptable('SSL REQUIRED !')(environ, start_response)
 

	
 
        ip_addr = self._get_ip_addr(environ)
 
        username = None
 
        # skip passing error to error controller
 
        environ['pylons.status_code_redirect'] = True
 

	
 
        #======================================================================
 
        # EXTRACT REPOSITORY NAME FROM ENV
 
        #======================================================================
 
        try:
 
            repo_name = environ['REPO_NAME'] = self.__get_repository(environ)
 
            log.debug('Extracted repo name is %s' % repo_name)
 
        except:
 
            return HTTPInternalServerError()(environ, start_response)
 

	
 
        # quick check if that dir exists...
 
        if is_valid_repo(repo_name, self.basepath, 'hg') is False:
 
            return HTTPNotFound()(environ, start_response)
 

	
 
        #======================================================================
 
        # GET ACTION PULL or PUSH
 
        #======================================================================
 
        action = self.__get_action(environ)
 

	
 
        #======================================================================
 
        # CHECK ANONYMOUS PERMISSION
 
        #======================================================================
 
        if action in ['pull', 'push']:
 
            anonymous_user = self.__get_user('default')
 
            username = anonymous_user.username
 
            anonymous_perm = self._check_permission(action, anonymous_user,
 
                                                    repo_name, ip_addr)
 

	
 
            if anonymous_perm is not True or anonymous_user.active is False:
 
                if anonymous_perm is not True:
 
                    log.debug('Not enough credentials to access this '
 
                              'repository as anonymous user')
 
                if anonymous_user.active is False:
 
                    log.debug('Anonymous access is disabled, running '
 
                              'authentication')
 
                #==============================================================
 
                # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
 
                # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
 
                #==============================================================
 

	
 
                # Attempting to retrieve username from the container
 
                username = get_container_username(environ, self.config)
 

	
 
                # If not authenticated by the container, running basic auth
 
                if not username:
 
                    self.authenticate.realm = \
 
                        safe_str(self.config['rhodecode_realm'])
 
                    result = self.authenticate(environ)
 
                    if isinstance(result, str):
 
                        AUTH_TYPE.update(environ, 'basic')
 
                        REMOTE_USER.update(environ, result)
 
                        username = result
 
                    else:
 
                        return result.wsgi_application(environ, start_response)
 

	
 
                #==============================================================
 
                # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
 
                #==============================================================
 
                try:
 
                    user = self.__get_user(username)
 
                    if user is None or not user.active:
 
                        return HTTPForbidden()(environ, start_response)
 
                    username = user.username
 
                except:
 
                    log.error(traceback.format_exc())
 
                    return HTTPInternalServerError()(environ, start_response)
 

	
 
                #check permissions for this repository
 
                perm = self._check_permission(action, user, repo_name, ip_addr)
 
                if perm is not True:
 
                    return HTTPForbidden()(environ, start_response)
 

	
 
        # extras are injected into mercurial UI object and later available
 
        # in hg hooks executed by rhodecode
 
        from rhodecode import CONFIG
 
        server_url = get_server_url(environ)
 
        extras = {
 
            'ip': ip_addr,
 
            'username': username,
 
            'action': action,
 
            'repository': repo_name,
 
            'scm': 'hg',
 
            'config': CONFIG['__file__'],
 
            'server_url': server_url,
 
            'make_lock': None,
 
            'locked_by': [None, None]
 
        }
 
        #======================================================================
 
        # MERCURIAL REQUEST HANDLING
 
        #======================================================================
 
        repo_path = os.path.join(safe_str(self.basepath), safe_str(repo_name))
 
        log.debug('Repository path is %s' % repo_path)
 

	
 
        # CHECK LOCKING only if it's not ANONYMOUS USER
 
        if username != User.DEFAULT_USER:
 
            log.debug('Checking locking on repository')
 
            (make_lock,
 
             locked,
 
             locked_by) = self._check_locking_state(
 
                            environ=environ, action=action,
 
                            repo=repo_name, user_id=user.user_id
 
                       )
 
            # store the make_lock for later evaluation in hooks
 
            extras.update({'make_lock': make_lock,
 
                           'locked_by': locked_by})
 

	
 
        # set the environ variables for this request
 
        os.environ['RC_SCM_DATA'] = json.dumps(extras)
 
        fix_PATH()
 
        log.debug('HOOKS extras is %s' % extras)
 
        baseui = make_ui('db')
 
        self.__inject_extras(repo_path, baseui, extras)
 

	
 
        try:
 
            log.info('%s action on HG repo "%s" by "%s" from %s' %
 
                     (action, repo_name, username, ip_addr))
 
            app = self.__make_app(repo_path, baseui, extras)
 
            return app(environ, start_response)
 
        except RepoError, e:
 
            if str(e).find('not found') != -1:
 
                return HTTPNotFound()(environ, start_response)
 
        except HTTPLockedRC, e:
 
            _code = CONFIG.get('lock_ret_code')
 
            log.debug('Repository LOCKED ret code %s!' % (_code))
 
            return e(environ, start_response)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            return HTTPInternalServerError()(environ, start_response)
 
        finally:
 
            # invalidate cache on push
 
            if action == 'push':
 
                self._invalidate_cache(repo_name)
 

	
 
    def __make_app(self, repo_name, baseui, extras):
 
        """
 
        Make an wsgi application using hgweb, and inject generated baseui
 
        instance, additionally inject some extras into ui object
 
        """
 
        return hgweb_mod.hgweb(repo_name, name=repo_name, baseui=baseui)
 

	
 
    def __get_repository(self, environ):
 
        """
 
        Gets repository name out of PATH_INFO header
 

	
 
        :param environ: environ where PATH_INFO is stored
 
        """
 
        try:
 
            environ['PATH_INFO'] = self._get_by_id(environ['PATH_INFO'])
 
            repo_name = '/'.join(environ['PATH_INFO'].split('/')[1:])
 
            if repo_name.endswith('/'):
 
                repo_name = repo_name.rstrip('/')
 
        except:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
        return repo_name
 

	
 
    def __get_user(self, username):
 
        return User.get_by_username(username)
 

	
 
    def __get_action(self, environ):
 
        """
 
        Maps mercurial request commands into a clone,pull or push command.
 
        This should always return a valid command string
 

	
 
        :param environ:
 
        """
 
        mapping = {'changegroup': 'pull',
 
                   'changegroupsubset': 'pull',
 
                   'stream_out': 'pull',
 
                   'listkeys': 'pull',
 
                   'unbundle': 'push',
 
                   'pushkey': 'push', }
 
        for qry in environ['QUERY_STRING'].split('&'):
 
            if qry.startswith('cmd'):
 
                cmd = qry.split('=')[-1]
 
                if cmd in mapping:
 
                    return mapping[cmd]
 

	
 
                return 'pull'
 

	
 
        raise Exception('Unable to detect pull/push action !!'
 
                        'Are you using non standard command or client ?')
 

	
 
    def __inject_extras(self, repo_path, baseui, extras={}):
 
        """
 
        Injects some extra params into baseui instance
 

	
 
        also overwrites global settings with those takes from local hgrc file
 

	
 
        :param baseui: baseui instance
 
        :param extras: dict with extra params to put into baseui
 
        """
 

	
 
        hgrc = os.path.join(repo_path, '.hg', 'hgrc')
 

	
 
        # make our hgweb quiet so it doesn't print output
 
        baseui.setconfig('ui', 'quiet', 'true')
 

	
 
        #inject some additional parameters that will be available in ui
 
        #for hooks
 
        for k, v in extras.items():
 
            baseui.setconfig('rhodecode_extras', k, v)
 

	
 
        repoui = make_ui('file', hgrc, False)
 

	
 
        if repoui:
 
            #overwrite our ui instance with the section from hgrc file
 
            for section in ui_sections:
 
                for k, v in repoui.configitems(section):
 
                    baseui.setconfig(section, k, v)
 
        _set_extras(extras)
rhodecode/lib/utils2.py
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.utils
 
    ~~~~~~~~~~~~~~~~~~~
 

	
 
    Some simple helper functions
 

	
 
    :created_on: Jan 5, 2011
 
    :author: marcink
 
    :copyright: (C) 2011-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import os
 
import re
 
import sys
 
import time
 
import datetime
 
import traceback
 
import webob
 

	
 
from pylons.i18n.translation import _, ungettext
 
from rhodecode.lib.vcs.utils.lazy import LazyProperty
 
from rhodecode.lib.compat import json
 

	
 

	
 
def __get_lem():
 
    """
 
    Get language extension map based on what's inside pygments lexers
 
    """
 
    from pygments import lexers
 
    from string import lower
 
    from collections import defaultdict
 

	
 
    d = defaultdict(lambda: [])
 

	
 
    def __clean(s):
 
        s = s.lstrip('*')
 
        s = s.lstrip('.')
 

	
 
        if s.find('[') != -1:
 
            exts = []
 
            start, stop = s.find('['), s.find(']')
 

	
 
            for suffix in s[start + 1:stop]:
 
                exts.append(s[:s.find('[')] + suffix)
 
            return map(lower, exts)
 
        else:
 
            return map(lower, [s])
 

	
 
    for lx, t in sorted(lexers.LEXERS.items()):
 
        m = map(__clean, t[-2])
 
        if m:
 
            m = reduce(lambda x, y: x + y, m)
 
            for ext in m:
 
                desc = lx.replace('Lexer', '')
 
                d[ext].append(desc)
 

	
 
    return dict(d)
 

	
 

	
 
def str2bool(_str):
 
    """
 
    returns True/False value from given string, it tries to translate the
 
    string into boolean
 

	
 
    :param _str: string value to translate into boolean
 
    :rtype: boolean
 
    :returns: boolean from given string
 
    """
 
    if _str is None:
 
        return False
 
    if _str in (True, False):
 
        return _str
 
    _str = str(_str).strip().lower()
 
    return _str in ('t', 'true', 'y', 'yes', 'on', '1')
 

	
 

	
 
def aslist(obj, sep=None, strip=True):
 
    """
 
    Returns given string separated by sep as list
 

	
 
    :param obj:
 
    :param sep:
 
    :param strip:
 
    """
 
    if isinstance(obj, (basestring)):
 
        lst = obj.split(sep)
 
        if strip:
 
            lst = [v.strip() for v in lst]
 
        return lst
 
    elif isinstance(obj, (list, tuple)):
 
        return obj
 
    elif obj is None:
 
        return []
 
    else:
 
        return [obj]
 

	
 

	
 
def convert_line_endings(line, mode):
 
    """
 
    Converts a given line  "line end" accordingly to given mode
 

	
 
    Available modes are::
 
        0 - Unix
 
        1 - Mac
 
        2 - DOS
 

	
 
    :param line: given line to convert
 
    :param mode: mode to convert to
 
    :rtype: str
 
    :return: converted line according to mode
 
    """
 
    from string import replace
 

	
 
    if mode == 0:
 
            line = replace(line, '\r\n', '\n')
 
            line = replace(line, '\r', '\n')
 
    elif mode == 1:
 
            line = replace(line, '\r\n', '\r')
 
            line = replace(line, '\n', '\r')
 
    elif mode == 2:
 
            line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line)
 
    return line
 

	
 

	
 
def detect_mode(line, default):
 
    """
 
    Detects line break for given line, if line break couldn't be found
 
    given default value is returned
 

	
 
    :param line: str line
 
    :param default: default
 
    :rtype: int
 
    :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS
 
    """
 
    if line.endswith('\r\n'):
 
        return 2
 
    elif line.endswith('\n'):
 
        return 0
 
    elif line.endswith('\r'):
 
        return 1
 
    else:
 
        return default
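
# For illustration, a small sketch combining detect_mode and
# convert_line_endings; the text is made up:
_text = 'first line\nsecond line\n'
assert detect_mode(_text.splitlines(True)[0], 0) == 0                      # Unix detected
assert convert_line_endings(_text, 2) == 'first line\r\nsecond line\r\n'   # converted to DOS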
 

	
 

	
 
def generate_api_key(username, salt=None):
 
    """
 
    Generates a unique API key for the given username; if no salt is given,
 
    one is generated from a random string
 

	
 
    :param username: username as string
 
    :param salt: salt used to generate the key hash
 
    :rtype: str
 
    :returns: sha1 hash from username+salt
 
    """
 
    from tempfile import _RandomNameSequence
 
    import hashlib
 

	
 
    if salt is None:
 
        salt = _RandomNameSequence().next()
 

	
 
    return hashlib.sha1(username + salt).hexdigest()
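
# For illustration, a sketch of the salting behaviour; username and salt are
# made up:
_key = generate_api_key('marcink', salt='s3cr3t')
assert _key == generate_api_key('marcink', salt='s3cr3t')   # same input, same key
assert len(_key) == 40                                       # sha1 hexdigest length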
 

	
 

	
 
def safe_int(val, default=None):
 
    """
 
    Returns int() of val; if val is not convertible to int, the default
 
    is returned instead
 

	
 
    :param val:
 
    :param default:
 
    """
 

	
 
    try:
 
        val = int(val)
 
    except (ValueError, TypeError):
 
        val = default
 

	
 
    return val
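
# For illustration:
assert safe_int('42') == 42
assert safe_int('not-a-number') is None       # falls back to the default (None)
assert safe_int(None, default=0) == 0         # TypeError is caught as well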
 

	
 

	
 
def safe_unicode(str_, from_encoding=None):
 
    """
 
    Safe unicode function. Does a few tricks to turn str_ into unicode
 

	
 
    In case of a UnicodeDecodeError we try to decode with the encoding detected
 
    by the chardet library; if that fails, fall back to unicode with errors replaced
 

	
 
    :param str_: string to decode
 
    :rtype: unicode
 
    :returns: unicode object
 
    """
 
    if isinstance(str_, unicode):
 
        return str_
 

	
 
    if not from_encoding:
 
        import rhodecode
 
        DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
 
                                                        'utf8'), sep=',')
 
        from_encoding = DEFAULT_ENCODINGS
 

	
 
    if not isinstance(from_encoding, (list, tuple)):
 
        from_encoding = [from_encoding]
 

	
 
    try:
 
        return unicode(str_)
 
    except UnicodeDecodeError:
 
        pass
 

	
 
    for enc in from_encoding:
 
        try:
 
            return unicode(str_, enc)
 
        except UnicodeDecodeError:
 
            pass
 

	
 
    try:
 
        import chardet
 
        encoding = chardet.detect(str_)['encoding']
 
        if encoding is None:
 
            raise Exception()
 
        return str_.decode(encoding)
 
    except (ImportError, UnicodeDecodeError, Exception):
 
        return unicode(str_, from_encoding[0], 'replace')
 

	
 

	
 
def safe_str(unicode_, to_encoding=None):
 
    """
 
    Safe str function. Does a few tricks to turn unicode_ into a string
 

	
 
    In case of a UnicodeEncodeError we try to encode with the encoding detected
 
    by the chardet library; if that fails, fall back to a string with errors replaced
 

	
 
    :param unicode_: unicode to encode
 
    :rtype: str
 
    :returns: str object
 
    """
 

	
 
    # if it's not basestr cast to str
 
    if not isinstance(unicode_, basestring):
 
        return str(unicode_)
 

	
 
    if isinstance(unicode_, str):
 
        return unicode_
 

	
 
    if not to_encoding:
 
        import rhodecode
 
        DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
 
                                                        'utf8'), sep=',')
 
        to_encoding = DEFAULT_ENCODINGS
 

	
 
    if not isinstance(to_encoding, (list, tuple)):
 
        to_encoding = [to_encoding]
 

	
 
    for enc in to_encoding:
 
        try:
 
            return unicode_.encode(enc)
 
        except UnicodeEncodeError:
 
            pass
 

	
 
    try:
 
        import chardet
 
        encoding = chardet.detect(unicode_)['encoding']
 
        if encoding is None:
 
            raise Exception()
 

	
 
        return unicode_.encode(encoding)
 
    except (ImportError, UnicodeEncodeError, Exception):
 
        return unicode_.encode(to_encoding[0], 'replace')
 

	
 
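
# For illustration, a round trip between safe_unicode and safe_str with an
# explicit encoding (passing one avoids the rhodecode.CONFIG lookup used when
# no encoding is given); the byte string is made up:
_raw = '\xc5\x82'                                    # u'\u0142' ('l with stroke') as utf8 bytes
assert safe_unicode(_raw, from_encoding='utf8') == u'\u0142'
assert safe_str(u'\u0142', to_encoding='utf8') == _raw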
 

	
 

	
 
def remove_suffix(s, suffix):
 
    if s.endswith(suffix):
 
        s = s[:-1 * len(suffix)]
 
    return s
 

	
 

	
 
def remove_prefix(s, prefix):
 
    if s.startswith(prefix):
 
        s = s[len(prefix):]
 
    return s
 

	
 

	
 
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
 
    """
 
    Custom engine_from_config function that makes sure we use NullPool for
 
    file based sqlite databases. This prevents errors on sqlite. This only
 
    applies to sqlalchemy versions < 0.7.0
 

	
 
    """
 
    import sqlalchemy
 
    from sqlalchemy import engine_from_config as efc
 
    import logging
 

	
 
    if int(sqlalchemy.__version__.split('.')[1]) < 7:
 

	
 
        # This solution should work for sqlalchemy < 0.7.0, and should use
 
        # proxy=TimerProxy() for execution time profiling
 

	
 
        from sqlalchemy.pool import NullPool
 
        url = configuration[prefix + 'url']
 

	
 
        if url.startswith('sqlite'):
 
            kwargs.update({'poolclass': NullPool})
 
        return efc(configuration, prefix, **kwargs)
 
    else:
 
        import time
 
        from sqlalchemy import event
 
        from sqlalchemy.engine import Engine
 

	
 
        log = logging.getLogger('sqlalchemy.engine')
 
        BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38)
 
        engine = efc(configuration, prefix, **kwargs)
 

	
 
        def color_sql(sql):
 
            COLOR_SEQ = "\033[1;%dm"
 
            COLOR_SQL = YELLOW
 
            normal = '\x1b[0m'
 
            return ''.join([COLOR_SEQ % COLOR_SQL, sql, normal])
 

	
 
        if configuration['debug']:
 
            #attach events only for debug configuration
 

	
 
            def before_cursor_execute(conn, cursor, statement,
 
                                    parameters, context, executemany):
 
                context._query_start_time = time.time()
 
                log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
 

	
 
            def after_cursor_execute(conn, cursor, statement,
 
                                    parameters, context, executemany):
 
                total = time.time() - context._query_start_time
 
                log.info(color_sql("<<<<< TOTAL TIME: %f <<<<<" % total))
 

	
 
            event.listen(engine, "before_cursor_execute",
 
                         before_cursor_execute)
 
            event.listen(engine, "after_cursor_execute",
 
                         after_cursor_execute)
 

	
 
    return engine
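
# For illustration, a minimal sketch of the configuration dict this wrapper
# expects; the sqlite URL is made up and the call is kept as comments to avoid
# creating an engine on import:
#
#     config = {
#         'sqlalchemy.url': 'sqlite:///rhodecode.db',
#         'debug': False,     # True attaches the query-timing listeners above
#     }
#     engine = engine_from_config(config, prefix='sqlalchemy.')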
 

	
 

	
 
def age(prevdate, show_short_version=False):
 
    """
 
    turns a datetime into an age string.
 
    If show_short_version is True, it will generate a less accurate but shorter string,
 
    for example: 2 days ago, instead of 2 days and 23 hours ago.
 

	
 
    :param prevdate: datetime object
 
    :param show_short_version: whether to approximate the date and return a shorter string
 
    :rtype: unicode
 
    :returns: unicode words describing age
 
    """
 
    now = datetime.datetime.now()
 
    now = now.replace(microsecond=0)
 
    order = ['year', 'month', 'day', 'hour', 'minute', 'second']
 
    deltas = {}
 
    future = False
 

	
 
    if prevdate > now:
 
        now, prevdate = prevdate, now
 
        future = True
 

	
 
    # Get date parts deltas
 
    for part in order:
 
        if future:
 
            from dateutil import relativedelta
 
            d = relativedelta.relativedelta(now, prevdate)
 
            deltas[part] = getattr(d, part + 's')
 
        else:
 
            deltas[part] = getattr(now, part) - getattr(prevdate, part)
 

	
 
    # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
 
    # not 1 hour, -59 minutes and -59 seconds)
 
    for num, length in [(5, 60), (4, 60), (3, 24)]:  # seconds, minutes, hours
 
        part = order[num]
 
        carry_part = order[num - 1]
 

	
 
        if deltas[part] < 0:
 
            deltas[part] += length
 
            deltas[carry_part] -= 1
 

	
 
    # Same thing for days except that the increment depends on the (variable)
 
    # number of days in the month
 
    month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
 
    if deltas['day'] < 0:
 
        if prevdate.month == 2 and (prevdate.year % 4 == 0 and
 
            (prevdate.year % 100 != 0 or prevdate.year % 400 == 0)):
 
            deltas['day'] += 29
 
        else:
 
            deltas['day'] += month_lengths[prevdate.month - 1]
 

	
 
        deltas['month'] -= 1
 

	
 
    if deltas['month'] < 0:
 
        deltas['month'] += 12
 
        deltas['year'] -= 1
 

	
 
    # Format the result
 
    fmt_funcs = {
 
        'year': lambda d: ungettext(u'%d year', '%d years', d) % d,
 
        'month': lambda d: ungettext(u'%d month', '%d months', d) % d,
 
        'day': lambda d: ungettext(u'%d day', '%d days', d) % d,
 
        'hour': lambda d: ungettext(u'%d hour', '%d hours', d) % d,
 
        'minute': lambda d: ungettext(u'%d minute', '%d minutes', d) % d,
 
        'second': lambda d: ungettext(u'%d second', '%d seconds', d) % d,
 
    }
 

	
 
    for i, part in enumerate(order):
 
        value = deltas[part]
 
        if value == 0:
 
            continue
 

	
 
        if i < 5:
 
            sub_part = order[i + 1]
 
            sub_value = deltas[sub_part]
 
        else:
 
            sub_value = 0
 

	
 
        if sub_value == 0 or show_short_version:
 
            if future:
 
                return _(u'in %s') % fmt_funcs[part](value)
 
            else:
 
                return _(u'%s ago') % fmt_funcs[part](value)
 
        if future:
 
            return _(u'in %s and %s') % (fmt_funcs[part](value),
 
                fmt_funcs[sub_part](sub_value))
 
        else:
 
            return _(u'%s and %s ago') % (fmt_funcs[part](value),
 
                fmt_funcs[sub_part](sub_value))
 

	
 
    return _(u'just now')
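
# For illustration (the exact wording comes from the translation catalogue, so
# the expected results are shown as comments rather than asserts):
_prev = datetime.datetime.now() - datetime.timedelta(minutes=5, seconds=30)
age(_prev)                            # -> u'5 minutes and 30 seconds ago'
age(_prev, show_short_version=True)   # -> u'5 minutes ago'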
 

	
 

	
 
def uri_filter(uri):
 
    """
 
    Removes user:password from given url string
 

	
 
    :param uri:
 
    :rtype: list
 
    :returns: filtered list of strings (protocol, host, port)
 
    """
 
    if not uri:
 
        return ''
 

	
 
    proto = ''
 

	
 
    for pat in ('https://', 'http://'):
 
        if uri.startswith(pat):
 
            uri = uri[len(pat):]
 
            proto = pat
 
            break
 

	
 
    # remove passwords and username
 
    uri = uri[uri.find('@') + 1:]
 

	
 
    # get the port
 
    cred_pos = uri.find(':')
 
    if cred_pos == -1:
 
        host, port = uri, None
 
    else:
 
        host, port = uri[:cred_pos], uri[cred_pos + 1:]
 

	
 
    return filter(None, [proto, host, port])
 

	
 

	
 
def credentials_filter(uri):
 
    """
 
    Returns a url with removed credentials
 

	
 
    :param uri:
 
    """
 

	
 
    uri = uri_filter(uri)
 
    #check if we have port
 
    if len(uri) > 2 and uri[2]:
 
        uri[2] = ':' + uri[2]
 

	
 
    return ''.join(uri)
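
# For illustration, with a made-up clone URL:
assert (credentials_filter('https://user:secret@example.com:8080/repo')
        == 'https://example.com:8080/repo')     # user:secret stripped, port and path kept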
 

	
 

	
 
def get_changeset_safe(repo, rev):
 
    """
 
    Safe version of get_changeset; if the changeset doesn't exist for the
 
    repo it returns a dummy EmptyChangeset instead
 

	
 
    :param repo:
 
    :param rev:
 
    """
 
    from rhodecode.lib.vcs.backends.base import BaseRepository
 
    from rhodecode.lib.vcs.exceptions import RepositoryError
 
    from rhodecode.lib.vcs.backends.base import EmptyChangeset
 
    if not isinstance(repo, BaseRepository):
 
        raise Exception('You must pass a Repository '
 
                        'object as first argument, got %s' % type(repo))
 

	
 
    try:
 
        cs = repo.get_changeset(rev)
 
    except RepositoryError:
 
        cs = EmptyChangeset(requested_revision=rev)
 
    return cs
 

	
 

	
 
def datetime_to_time(dt):
 
    if dt:
 
        return time.mktime(dt.timetuple())
 

	
 

	
 
def time_to_datetime(tm):
 
    if tm:
 
        if isinstance(tm, basestring):
 
            try:
 
                tm = float(tm)
 
            except ValueError:
 
                return
 
        return datetime.datetime.fromtimestamp(tm)
 

	
 
MENTIONS_REGEX = r'(?:^@|\s@)([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)(?:\s{1})'
 

	
 

	
 
def extract_mentioned_users(s):
 
    """
 
    Returns unique usernames from given string s that have @mention
 

	
 
    :param s: string to get mentions
 
    """
 
    usrs = set()
 
    for username in re.findall(MENTIONS_REGEX, s):
 
        usrs.add(username)
 

	
 
    return sorted(list(usrs), key=lambda k: k.lower())
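
# For illustration (the usernames are made up):
assert (extract_mentioned_users('thanks @marcink and @lukaszb for the review')
        == ['lukaszb', 'marcink'])      # unique, sorted case-insensitively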
 

	
 

	
 
class AttributeDict(dict):
 
    def __getattr__(self, attr):
 
        return self.get(attr, None)
 
    __setattr__ = dict.__setitem__
 
    __delattr__ = dict.__delitem__
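
# For illustration:
_data = AttributeDict(username='marcink', action='push')
_data.ip = '127.0.0.1'                  # __setattr__ writes straight into the dict
assert _data.username == 'marcink'
assert _data.missing_key is None        # missing keys return None, not AttributeError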
 

	
 

	
 
def fix_PATH(os_=None):
 
    """
 
    Get the currently active python executable's path and prepend it to the PATH
 
    variable to fix issues with subprocess calls and different python versions
 
    """
 
    import sys
 
    if os_ is None:
 
        import os
 
    else:
 
        os = os_
 

	
 
    cur_path = os.path.split(sys.executable)[0]
 
    if not os.environ['PATH'].startswith(cur_path):
 
        os.environ['PATH'] = '%s:%s' % (cur_path, os.environ['PATH'])
 

	
 

	
 
def obfuscate_url_pw(engine):
 
    _url = engine or ''
 
    from sqlalchemy.engine import url as sa_url
 
    try:
 
        _url = sa_url.make_url(engine)
 
        if _url.password:
 
            _url.password = 'XXXXX'
 
    except:
 
        pass
 
    return str(_url)
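
# For illustration; the connection string is made up:
_dsn = 'postgresql://rhodecode:secret@localhost/rhodecode'
obfuscate_url_pw(_dsn)    # -> 'postgresql://rhodecode:XXXXX@localhost/rhodecode'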
 

	
 

	
 
def get_server_url(environ):
 
    req = webob.Request(environ)
 
    return req.host_url + req.script_name
 

	
 

	
 
def _extract_extras():
 
    """
 
    Extracts the rc extras data from os.environ and wraps it into an
 
    AttributeDict object
 
    """
 
    try:
 
        rc_extras = json.loads(os.environ['RC_SCM_DATA'])
 
    except:
 
        print os.environ
 
        print >> sys.stderr, traceback.format_exc()
 
        rc_extras = {}
 

	
 
    try:
 
        for k in ['username', 'repository', 'locked_by', 'scm', 'make_lock',
 
                  'action', 'ip']:
 
            rc_extras[k]
 
    except KeyError, e:
 
        raise Exception('Missing key %s in os.environ %s' % (e, rc_extras))
 

	
 
    return AttributeDict(rc_extras)
 

	
 

	
 
def _set_extras(extras):
 
    os.environ['RC_SCM_DATA'] = json.dumps(extras)
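
# For illustration, a round trip through the RC_SCM_DATA environment variable;
# all values below are made up, only the key names are the ones required by
# _extract_extras (kept as comments to avoid touching os.environ on import):
#
#     _set_extras({
#         'username': 'marcink', 'repository': 'somerepo', 'scm': 'hg',
#         'action': 'push', 'ip': '127.0.0.1',
#         'make_lock': None, 'locked_by': [None, None],
#     })
#     extras = _extract_extras()        # reads RC_SCM_DATA back from os.environ
#     assert extras.username == 'marcink'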
rhodecode/lib/vcs/backends/base.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    vcs.backends.base
 
    ~~~~~~~~~~~~~~~~~
 

	
 
    Base for all available scm backends
 

	
 
    :created_on: Apr 8, 2010
 
    :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
 
"""
 

	
 
import datetime
 
from itertools import chain
 
from rhodecode.lib.vcs.utils import author_name, author_email
 
from rhodecode.lib.vcs.utils.lazy import LazyProperty
 
from rhodecode.lib.vcs.utils.helpers import get_dict_for_attrs
 
from rhodecode.lib.vcs.conf import settings
 

	
 
from rhodecode.lib.vcs.exceptions import ChangesetError, EmptyRepositoryError, \
 
    NodeAlreadyAddedError, NodeAlreadyChangedError, NodeAlreadyExistsError, \
 
    NodeAlreadyRemovedError, NodeDoesNotExistError, NodeNotChangedError, \
 
    RepositoryError
 

	
 

	
 
class BaseRepository(object):
 
    """
 
    Base Repository for final backends
 

	
 
    **Attributes**
 

	
 
        ``DEFAULT_BRANCH_NAME``
 
            name of default branch (i.e. "trunk" for svn, "master" for git, etc.)
 

	
 
        ``scm``
 
            alias of scm, i.e. *git* or *hg*
 

	
 
        ``repo``
 
            object from external api
 

	
 
        ``revisions``
 
            list of all available revisions' ids, in ascending order
 

	
 
        ``changesets``
 
            storage dict caching returned changesets
 

	
 
        ``path``
 
            absolute path to the repository
 

	
 
        ``branches``
 
            branches as list of changesets
 

	
 
        ``tags``
 
            tags as list of changesets
 
    """
 
    scm = None
 
    DEFAULT_BRANCH_NAME = None
 
    EMPTY_CHANGESET = '0' * 40
 

	
 
    def __init__(self, repo_path, create=False, **kwargs):
 
        """
 
        Initializes repository. Raises RepositoryError if the repository cannot
 
        be found at the given ``repo_path``, or if the directory at ``repo_path``
 
        exists and ``create`` is set to True.
 

	
 
        :param repo_path: local path of the repository
 
        :param create=False: if set to True, would try to create the repository.
 
        :param src_url=None: if set, should be proper url from which repository
 
          would be cloned; requires ``create`` parameter to be set to True -
 
          raises RepositoryError if src_url is set and create evaluates to
 
          False
 
        """
 
        raise NotImplementedError
 

	
 
    def __str__(self):
 
        return '<%s at %s>' % (self.__class__.__name__, self.path)
 

	
 
    def __repr__(self):
 
        return self.__str__()
 

	
 
    def __len__(self):
 
        return self.count()
 

	
 
    @LazyProperty
 
    def alias(self):
 
        for k, v in settings.BACKENDS.items():
 
            if v.split('.')[-1] == str(self.__class__.__name__):
 
                return k
 

	
 
    @LazyProperty
 
    def name(self):
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def owner(self):
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def description(self):
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def size(self):
 
        """
 
        Returns combined size in bytes for all repository files
 
        """
 

	
 
        size = 0
 
        try:
 
            tip = self.get_changeset()
 
            for topnode, dirs, files in tip.walk('/'):
 
                for f in files:
 
                    size += tip.get_file_size(f.path)
 
 

	
 
        except RepositoryError, e:
 
            pass
 
        return size
 

	
 
    def is_valid(self):
 
        """
 
        Validates repository.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_last_change(self):
 
        self.get_changesets()
 

	
 
    #==========================================================================
 
    # CHANGESETS
 
    #==========================================================================
 

	
 
    def get_changeset(self, revision=None):
 
        """
 
        Returns instance of ``Changeset`` class. If ``revision`` is None, most
 
        recent changeset is returned.
 

	
 
        :raises ``EmptyRepositoryError``: if there are no revisions
 
        """
 
        raise NotImplementedError
 

	
 
    def __iter__(self):
 
        """
 
        Allows Repository objects to be iterated.
 

	
 
        *Requires* implementation of ``__getitem__`` method.
 
        """
 
        for revision in self.revisions:
 
            yield self.get_changeset(revision)
 

	
 
    def get_changesets(self, start=None, end=None, start_date=None,
 
                       end_date=None, branch_name=None, reverse=False):
 
        """
 
        Returns an iterator of ``Changeset`` objects from start to end,
 
        end not inclusive. This should behave just like a list, i.e. end
 
        is not inclusive
 

	
 
        :param start: None or str
 
        :param end: None or str
 
        :param start_date:
 
        :param end_date:
 
        :param branch_name:
 
        :param reverse:
 
        """
 
        raise NotImplementedError
 

	
 
    def __getslice__(self, i, j):
 
        """
 
        Returns an iterator over the sliced repository
 
        """
 
        for rev in self.revisions[i:j]:
 
            yield self.get_changeset(rev)
 

	
 
    def __getitem__(self, key):
 
        return self.get_changeset(key)
 

	
 
    def count(self):
 
        return len(self.revisions)
 

	
 
    def tag(self, name, user, revision=None, message=None, date=None, **opts):
 
        """
 
        Creates and returns a tag for the given ``revision``.
 

	
 
        :param name: name for new tag
 
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
 
        :param revision: changeset id for which new tag would be created
 
        :param message: message of the tag's commit
 
        :param date: date of tag's commit
 

	
 
        :raises TagAlreadyExistError: if tag with same name already exists
 
        """
 
        raise NotImplementedError
 

	
 
    def remove_tag(self, name, user, message=None, date=None):
 
        """
 
        Removes tag with the given ``name``.
 

	
 
        :param name: name of the tag to be removed
 
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
 
        :param message: message of the tag's removal commit
 
        :param date: date of tag's removal commit
 

	
 
        :raises TagDoesNotExistError: if tag with given name does not exists
 
        """
 
        raise NotImplementedError
 

	
 
    def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
 
            context=3):
 
        """
 
        Returns (git like) *diff*, as plain text. Shows changes introduced by
 
        ``rev2`` since ``rev1``.
 

	
 
        :param rev1: Entry point from which diff is shown. Can be
 
          ``self.EMPTY_CHANGESET`` - in this case, patch showing all
 
          the changes since empty state of the repository until ``rev2``
 
        :param rev2: Until which revision changes should be shown.
 
        :param ignore_whitespace: If set to ``True``, would not show whitespace
 
          changes. Defaults to ``False``.
 
        :param context: How many lines before/after changed lines should be
 
          shown. Defaults to ``3``.
 
        """
 
        raise NotImplementedError
 

	
 
    # ========== #
 
    # COMMIT API #
 
    # ========== #
 

	
 
    @LazyProperty
 
    def in_memory_changeset(self):
 
        """
 
        Returns ``InMemoryChangeset`` object for this repository.
 
        """
 
        raise NotImplementedError
 

	
 
    def add(self, filenode, **kwargs):
 
        """
 
        Commit api function that will add given ``FileNode`` into this
 
        repository.
 

	
 
        :raises ``NodeAlreadyExistsError``: if there is a file with same path
 
          already in repository
 
        :raises ``NodeAlreadyAddedError``: if given node is already marked as
 
          *added*
 
        """
 
        raise NotImplementedError
 

	
 
    def remove(self, filenode, **kwargs):
 
        """
 
        Commit api function that will remove given ``FileNode`` from this
 
        repository.
 

	
 
        :raises ``EmptyRepositoryError``: if there are no changesets yet
 
        :raises ``NodeDoesNotExistError``: if there is no file with given path
 
        """
 
        raise NotImplementedError
 

	
 
    def commit(self, message, **kwargs):
 
        """
 
        Persists current changes made on this repository and returns newly
 
        created changeset.
 

	
 
        :raises ``NothingChangedError``: if no changes has been made
 
        """
 
        raise NotImplementedError
 

	
 
    def get_state(self):
 
        """
 
        Returns dictionary with ``added``, ``changed`` and ``removed`` lists
 
        containing ``FileNode`` objects.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_config_value(self, section, name, config_file=None):
 
        """
 
        Returns configuration value for a given [``section``] and ``name``.
 

	
 
        :param section: Section we want to retrieve value from
 
        :param name: Name of configuration we want to retrieve
 
        :param config_file: A path to file which should be used to retrieve
 
          configuration from (might also be a list of file paths)
 
        """
 
        raise NotImplementedError
 

	
 
    def get_user_name(self, config_file=None):
 
        """
 
        Returns user's name from global configuration file.
 

	
 
        :param config_file: A path to file which should be used to retrieve
 
          configuration from (might also be a list of file paths)
 
        """
 
        raise NotImplementedError
 

	
 
    def get_user_email(self, config_file=None):
 
        """
 
        Returns user's email from global configuration file.
 

	
 
        :param config_file: A path to file which should be used to retrieve
 
          configuration from (might also be a list of file paths)
 
        """
 
        raise NotImplementedError
 

	
 
    # =========== #
 
    # WORKDIR API #
 
    # =========== #
 

	
 
    @LazyProperty
 
    def workdir(self):
 
        """
 
        Returns ``Workdir`` instance for this repository.
 
        """
 
        raise NotImplementedError
 

	
 
    def inject_ui(self, **extras):
 
        """
 
        Injects extra parameters into UI object of this repo
 
        """
 
        required_extras = [
 
            'ip',
 
            'username',
 
            'action',
 
            'repository',
 
            'scm',
 
            'config',
 
            'server_url',
 
            'make_lock',
 
            'locked_by',
 
        ]
 
        for req in required_extras:
 
            if req not in extras:
 
                raise AttributeError('Missing attribute %s in extras' % (req))
 
        for k, v in extras.items():
 
            self._repo.ui.setconfig('rhodecode_extras', k, v)
 

	
 

	
 
class BaseChangeset(object):
 
    """
 
    Each backend should implement its own changeset representation.
 

	
 
    **Attributes**
 

	
 
        ``repository``
 
            repository object within which changeset exists
 

	
 
        ``id``
 
            may be ``raw_id`` or, e.g. for mercurial's tip, just ``tip``
 

	
 
        ``raw_id``
 
            raw changeset representation (i.e. full 40 length sha for git
 
            backend)
 

	
 
        ``short_id``
 
            shortened (if applicable) version of ``raw_id``; it would be a simple
 
            shortcut for ``raw_id[:12]`` for git/mercurial backends or same
 
            as ``raw_id`` for subversion
 

	
 
        ``revision``
 
            revision number as integer
 

	
 
        ``files``
 
            list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
 

	
 
        ``dirs``
 
            list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
 

	
 
        ``nodes``
 
            combined list of ``Node`` objects
 

	
 
        ``author``
 
            author of the changeset, as unicode
 

	
 
        ``message``
 
            message of the changeset, as unicode
 

	
 
        ``parents``
 
            list of parent changesets
 

	
 
        ``last``
 
            ``True`` if this is last changeset in repository, ``False``
 
            otherwise; trying to access this attribute while there is no
 
            changesets would raise ``EmptyRepositoryError``
 
    """
 
    def __str__(self):
 
        return '<%s at %s:%s>' % (self.__class__.__name__, self.revision,
 
            self.short_id)
 

	
 
    def __repr__(self):
 
        return self.__str__()
 

	
 
    def __unicode__(self):
 
        return u'%s:%s' % (self.revision, self.short_id)
 

	
 
    def __eq__(self, other):
 
        return self.raw_id == other.raw_id
 

	
 
    def __json__(self):
 
        return dict(
 
            short_id=self.short_id,
 
            raw_id=self.raw_id,
 
            revision=self.revision,
 
            message=self.message,
 
            date=self.date,
 
            author=self.author,
 
        )
 

	
 
    @LazyProperty
 
    def last(self):
 
        if self.repository is None:
 
            raise ChangesetError("Cannot check if it's most recent revision")
 
        return self.raw_id == self.repository.revisions[-1]
 

	
 
    @LazyProperty
 
    def parents(self):
 
        """
 
        Returns list of parent changesets.
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def children(self):
 
        """
 
        Returns list of children changesets.
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def id(self):
 
        """
 
        Returns string identifying this changeset.
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def raw_id(self):
 
        """
 
        Returns raw string identifying this changeset.
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def short_id(self):
 
        """
 
        Returns shortened version of ``raw_id`` attribute, as string,
 
        identifying this changeset, useful for web representation.
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def revision(self):
 
        """
 
        Returns integer identifying this changeset.
 

	
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def committer(self):
 
        """
 
        Returns Committer for given commit
 
        """
 

	
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def committer_name(self):
 
        """
 
        Returns Author name for given commit
 
        """
 

	
 
        return author_name(self.committer)
 

	
 
    @LazyProperty
 
    def committer_email(self):
 
        """
 
        Returns Author email address for given commit
 
        """
 

	
 
        return author_email(self.committer)
 

	
 
    @LazyProperty
 
    def author(self):
 
        """
 
        Returns Author for given commit
 
        """
 

	
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def author_name(self):
 
        """
 
        Returns Author name for given commit
 
        """
 

	
 
        return author_name(self.author)
 

	
 
    @LazyProperty
 
    def author_email(self):
 
        """
 
        Returns Author email address for given commit
 
        """
 

	
 
        return author_email(self.author)
 

	
 
    def get_file_mode(self, path):
 
        """
 
        Returns stat mode of the file at the given ``path``.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_file_content(self, path):
 
        """
 
        Returns content of the file at the given ``path``.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_file_size(self, path):
 
        """
 
        Returns size of the file at the given ``path``.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_file_changeset(self, path):
 
        """
 
        Returns last commit of the file at the given ``path``.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_file_history(self, path):
 
        """
 
        Returns history of file as reversed list of ``Changeset`` objects for
 
        which file at given ``path`` has been modified.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_nodes(self, path):
 
        """
 
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
 
        state of changeset at the given ``path``.
 

	
 
        :raises ``ChangesetError``: if node at the given ``path`` is not
 
          instance of ``DirNode``
 
        """
 
        raise NotImplementedError
 

	
 
    def get_node(self, path):
 
        """
 
        Returns ``Node`` object from the given ``path``.
 

	
 
        :raises ``NodeDoesNotExistError``: if there is no node at the given
 
          ``path``
 
        """
 
        raise NotImplementedError
 

	
 
    def fill_archive(self, stream=None, kind='tgz', prefix=None):
 
        """
 
        Fills up given stream.
 

	
 
        :param stream: file like object.
 
        :param kind: one of following: ``zip``, ``tar``, ``tgz``
 
            or ``tbz2``. Default: ``tgz``.
 
        :param prefix: name of root directory in archive.
 
            Default is repository name and changeset's raw_id joined with dash.
 

	
 
            repo-tip.<kind>
 
        """
 

	
 
        raise NotImplementedError
 

	
 
    def get_chunked_archive(self, **kwargs):
 
        """
 
        Returns iterable archive. Tiny wrapper around ``fill_archive`` method.
 

	
 
        :param chunk_size: extra parameter which controls size of returned
 
            chunks. Default:8k.
 
        """
 

	
 
        chunk_size = kwargs.pop('chunk_size', 8192)
 
        stream = kwargs.get('stream')
 
        self.fill_archive(**kwargs)
 
        while True:
 
            data = stream.read(chunk_size)
 
            if not data:
 
                break
 
            yield data
 

	
 
    @LazyProperty
 
    def root(self):
 
        """
 
        Returns ``RootNode`` object for this changeset.
 
        """
 
        return self.get_node('')
 

	
 
    def next(self, branch=None):
 
        """
 
        Returns the next changeset from current; if branch is given, it will
 
        return the next changeset belonging to that branch
 

	
 
        :param branch: show changesets within the given named branch
 
        """
 
        raise NotImplementedError
 

	
 
    def prev(self, branch=None):
 
        """
 
        Returns the previous changeset from current; if branch is given, it
 
        will return the previous changeset belonging to that branch
 

	
 
        :param branch: show changesets within the given named branch
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def added(self):
 
        """
 
        Returns list of added ``FileNode`` objects.
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def changed(self):
 
        """
 
        Returns list of modified ``FileNode`` objects.
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def removed(self):
 
        """
 
        Returns list of removed ``FileNode`` objects.
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def size(self):
 
        """
 
        Returns total number of bytes from contents of all filenodes.
 
        """
 
        return sum((node.size for node in self.get_filenodes_generator()))
 

	
 
    def walk(self, topurl=''):
 
        """
 
        Similar to the os.walk method. Instead of the filesystem it walks through
 
        the changeset starting at the given ``topurl``.  Returns a generator of tuples
 
        (topnode, dirnodes, filenodes).
 
        """
 
        topnode = self.get_node(topurl)
 
        yield (topnode, topnode.dirs, topnode.files)
 
        for dirnode in topnode.dirs:
 
            for tup in self.walk(dirnode.path):
 
                yield tup
 

	
 
    def get_filenodes_generator(self):
 
        """
 
        Returns generator that yields *all* file nodes.
 
        """
 
        for topnode, dirs, files in self.walk():
 
            for node in files:
 
                yield node
 

	
 
    def as_dict(self):
 
        """
 
        Returns dictionary with changeset's attributes and their values.
 
        """
 
        data = get_dict_for_attrs(self, ['id', 'raw_id', 'short_id',
 
            'revision', 'date', 'message'])
 
        data['author'] = {'name': self.author_name, 'email': self.author_email}
 
        data['added'] = [node.path for node in self.added]
 
        data['changed'] = [node.path for node in self.changed]
 
        data['removed'] = [node.path for node in self.removed]
 
        return data
 

	
 

	
 
class BaseWorkdir(object):
 
    """
 
    Working directory representation of single repository.
 

	
 
    :attribute: repository: repository object of working directory
 
    """
 

	
 
    def __init__(self, repository):
 
        self.repository = repository
 

	
 
    def get_branch(self):
 
        """
 
        Returns name of current branch.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_changeset(self):
 
        """
 
        Returns current changeset.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_added(self):
 
        """
 
        Returns list of ``FileNode`` objects marked as *new* in working
 
        directory.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_changed(self):
 
        """
 
        Returns list of ``FileNode`` objects *changed* in working directory.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_removed(self):
 
        """
 
        Returns list of ``RemovedFileNode`` objects marked as *removed* in
 
        working directory.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_untracked(self):
 
        """
 
        Returns list of ``FileNode`` objects which are present within working
 
        directory however are not tracked by repository.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_status(self):
 
        """
 
        Returns dict with ``added``, ``changed``, ``removed`` and ``untracked``
 
        lists.
 
        """
 
        raise NotImplementedError
 

	
 
    def commit(self, message, **kwargs):
 
        """
 
        Commits local (from working directory) changes and returns newly
 
        created
 
        ``Changeset``. Updates repository's ``revisions`` list.
 

	
 
        :raises ``CommitError``: if any error occurs while committing
 
        """
 
        raise NotImplementedError
 

	
 
    def update(self, revision=None):
 
        """
 
        Fetches content of the given revision and populates it within working
 
        directory.
 
        """
 
        raise NotImplementedError
 

	
 
    def checkout_branch(self, branch=None):
 
        """
 
        Checks out ``branch`` or the backend's default branch.
 

	
 
        Raises ``BranchDoesNotExistError`` if the branch does not exist.
 
        """
 
        raise NotImplementedError
 

	
 

	
 
class BaseInMemoryChangeset(object):
 
    """
 
    Represents differences between repository's state (most recent head) and
 
    changes made *in place*.
 

	
 
    **Attributes**
 

	
 
        ``repository``
 
            repository object for this in-memory-changeset
 

	
 
        ``added``
 
            list of ``FileNode`` objects marked as *added*
 

	
 
        ``changed``
 
            list of ``FileNode`` objects marked as *changed*
 

	
 
        ``removed``
 
            list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
 
            *removed*
 

	
 
        ``parents``
 
            list of ``Changeset`` representing parents of in-memory changeset.
 
            Should always be 2-element sequence.
 

	
 
    """
 

	
 
    def __init__(self, repository):
 
        self.repository = repository
 
        self.added = []
 
        self.changed = []
 
        self.removed = []
 
        self.parents = []
 

	
 
    def add(self, *filenodes):
 
        """
 
        Marks given ``FileNode`` objects as *to be committed*.
 

	
 
        :raises ``NodeAlreadyExistsError``: if node with same path exists at
 
          latest changeset
 
        :raises ``NodeAlreadyAddedError``: if node with same path is already
 
          marked as *added*
 
        """
 
        # Check if not already marked as *added* first
 
        for node in filenodes:
 
            if node.path in (n.path for n in self.added):
 
                raise NodeAlreadyAddedError("Such FileNode %s is already "
 
                    "marked for addition" % node.path)
 
        for node in filenodes:
 
            self.added.append(node)
 

	
 
    def change(self, *filenodes):
 
        """
 
        Marks given ``FileNode`` objects to be *changed* in next commit.
 

	
 
        :raises ``EmptyRepositoryError``: if there are no changesets yet
 
        :raises ``NodeAlreadyExistsError``: if node with same path is already
 
          marked to be *changed*
 
        :raises ``NodeAlreadyRemovedError``: if node with same path is already
 
          marked to be *removed*
 
        :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
 
          changeset
 
        :raises ``NodeNotChangedError``: if node hasn't really been changed
 
        """
 
        for node in filenodes:
 
            if node.path in (n.path for n in self.removed):
 
                raise NodeAlreadyRemovedError("Node at %s is already marked "
 
                    "as removed" % node.path)
 
        try:
 
            self.repository.get_changeset()
 
        except EmptyRepositoryError:
 
            raise EmptyRepositoryError("Nothing to change - try to *add* new "
 
                "nodes rather than changing them")
 
        for node in filenodes:
 
            if node.path in (n.path for n in self.changed):
 
                raise NodeAlreadyChangedError("Node at '%s' is already "
 
                    "marked as changed" % node.path)
 
            self.changed.append(node)
 

	
 
    def remove(self, *filenodes):
 
        """
 
        Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
 
        *removed* in next commit.
 

	
 
        :raises ``NodeAlreadyRemovedError``: if node has been already marked to
 
          be *removed*
 
        :raises ``NodeAlreadyChangedError``: if node has been already marked to
 
          be *changed*
 
        """
 
        for node in filenodes:
 
            if node.path in (n.path for n in self.removed):
 
                raise NodeAlreadyRemovedError("Node is already marked "
 
                    "for removal at %s" % node.path)
 
            if node.path in (n.path for n in self.changed):
 
                raise NodeAlreadyChangedError("Node is already marked to "
 
                    "be changed at %s" % node.path)
 
            # We only mark node as *removed* - real removal is done by
 
            # commit method
 
            self.removed.append(node)
 

	
 
    def reset(self):
 
        """
 
        Resets this instance to initial state (cleans ``added``, ``changed``
 
        and ``removed`` lists).
 
        """
 
        self.added = []
 
        self.changed = []
 
        self.removed = []
 
        self.parents = []
 

	
 
    def get_ipaths(self):
 
        """
 
        Returns generator of paths from nodes marked as added, changed or
 
        removed.
 
        """
 
        for node in chain(self.added, self.changed, self.removed):
 
            yield node.path
 

	
 
    def get_paths(self):
 
        """
 
        Returns list of paths from nodes marked as added, changed or removed.
 
        """
 
        return list(self.get_ipaths())
 

	
 
    def check_integrity(self, parents=None):
 
        """
 
        Checks in-memory changeset's integrity. Also, sets parents if not
 
        already set.
 

	
 
        :raises CommitError: if any error occurs (i.e.
 
          ``NodeDoesNotExistError``).
 
        """
 
        if not self.parents:
 
            parents = parents or []
 
            if len(parents) == 0:
 
                try:
 
                    parents = [self.repository.get_changeset(), None]
 
                except EmptyRepositoryError:
 
                    parents = [None, None]
 
            elif len(parents) == 1:
 
                parents += [None]
 
            self.parents = parents
 

	
 
        # Local parents, only if not None
 
        parents = [p for p in self.parents if p]
 

	
 
        # Check nodes marked as added
 
        for p in parents:
 
            for node in self.added:
 
                try:
 
                    p.get_node(node.path)
 
                except NodeDoesNotExistError:
 
                    pass
 
                else:
 
                    raise NodeAlreadyExistsError("Node at %s already exists "
 
                        "at %s" % (node.path, p))
 

	
 
        # Check nodes marked as changed
 
        missing = set(self.changed)
 
        not_changed = set(self.changed)
 
        if self.changed and not parents:
 
            raise NodeDoesNotExistError(str(self.changed[0].path))
 
        for p in parents:
 
            for node in self.changed:
 
                try:
 
                    old = p.get_node(node.path)
 
                    missing.remove(node)
 
                    if old.content != node.content:
 
                        not_changed.remove(node)
 
                except NodeDoesNotExistError:
 
                    pass
 
        if self.changed and missing:
 
            raise NodeDoesNotExistError("Node at %s is missing "
 
                "(parents: %s)" % (node.path, parents))
 

	
 
        if self.changed and not_changed:
 
            raise NodeNotChangedError("Node at %s wasn't actually changed "
 
                "since parents' changesets: %s" % (not_changed.pop().path,
 
                    parents)
 
            )
 

	
 
        # Check nodes marked as removed
 
        if self.removed and not parents:
 
            raise NodeDoesNotExistError("Cannot remove node at %s as there "
 
                "were no parents specified" % self.removed[0].path)
 
        really_removed = set()
 
        for p in parents:
 
            for node in self.removed:
 
                try:
 
                    p.get_node(node.path)
 
                    really_removed.add(node)
 
                except ChangesetError:
 
                    pass
 
        not_removed = set(self.removed) - really_removed
 
        if not_removed:
 
            raise NodeDoesNotExistError("Cannot remove node at %s from "
 
                "following parents: %s" % (not_removed[0], parents))
 

	
 
    def commit(self, message, author, parents=None, branch=None, date=None,
 
            **kwargs):
 
        """
 
        Performs in-memory commit (doesn't check workdir in any way) and
 
        returns newly created ``Changeset``. Updates repository's
 
        ``revisions``.
 

	
 
        .. note::
 
            While overriding this method each backend should call
 
            ``self.check_integrity(parents)`` in the first place.
 

	
 
        :param message: message of the commit
 
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
 
        :param parents: single parent or sequence of parents from which commit
 
          would be derived
 
        :param date: ``datetime.datetime`` instance. Defaults to
 
          ``datetime.datetime.now()``.
 
        :param branch: branch name, as string. If none given, default backend's
 
          branch would be used.
 

	
 
        :raises ``CommitError``: if any error occurs while committing
 
        """
 
        raise NotImplementedError
 

	
 

	
 
class EmptyChangeset(BaseChangeset):
 
    """
 
    A dummy empty changeset. It's possible to pass a hash when creating
 
    an EmptyChangeset
 
    """
 

	
 
    def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
 
                 alias=None, revision=-1, message='', author='', date=None):
 
        self._empty_cs = cs
 
        self.revision = revision
 
        self.message = message
 
        self.author = author
 
        self.date = date or datetime.datetime.fromtimestamp(0)
 
        self.repository = repo
 
        self.requested_revision = requested_revision
 
        self.alias = alias
 

	
 
    @LazyProperty
 
    def raw_id(self):
 
        """
 
        Returns raw string identifying this changeset, useful for web
 
        representation.
 
        """
 

	
 
        return self._empty_cs
 

	
 
    @LazyProperty
 
    def branch(self):
 
        from rhodecode.lib.vcs.backends import get_backend
 
        return get_backend(self.alias).DEFAULT_BRANCH_NAME
 

	
 
    @LazyProperty
 
    def short_id(self):
 
        return self.raw_id[:12]
 

	
 
    def get_file_changeset(self, path):
 
        return self
 

	
 
    def get_file_content(self, path):
 
        return u''
 

	
 
    def get_file_size(self, path):
 
        return 0
rhodecode/lib/vcs/backends/git/repository.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    vcs.backends.git
 
    ~~~~~~~~~~~~~~~~
 

	
 
    Git backend implementation.
 

	
 
    :created_on: Apr 8, 2010
 
    :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
 
"""
 

	
 
import os
 
import re
 
import time
 
import posixpath
 
import logging
 
import traceback
 
import urllib
 
import urllib2
 
from dulwich.repo import Repo, NotGitRepository
 
from dulwich.objects import Tag
 
from string import Template
 

	
 
import rhodecode
 
from rhodecode.lib.vcs.backends.base import BaseRepository
 
from rhodecode.lib.vcs.exceptions import BranchDoesNotExistError
 
from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError
 
from rhodecode.lib.vcs.exceptions import EmptyRepositoryError
 
from rhodecode.lib.vcs.exceptions import RepositoryError
 
from rhodecode.lib.vcs.exceptions import TagAlreadyExistError
 
from rhodecode.lib.vcs.exceptions import TagDoesNotExistError
 
from rhodecode.lib.vcs.utils import safe_unicode, makedate, date_fromtimestamp
 
from rhodecode.lib.vcs.utils.lazy import LazyProperty, ThreadLocalLazyProperty
 
from rhodecode.lib.vcs.utils.ordered_dict import OrderedDict
 
from rhodecode.lib.vcs.utils.paths import abspath
 
from rhodecode.lib.vcs.utils.paths import get_user_home
 
from .workdir import GitWorkdir
 
from .changeset import GitChangeset
 
from .inmemory import GitInMemoryChangeset
 
from .config import ConfigFile
 
from rhodecode.lib import subprocessio
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class GitRepository(BaseRepository):
 
    """
 
    Git repository backend.
 
    """
 
    DEFAULT_BRANCH_NAME = 'master'
 
    scm = 'git'
 

	
 
    def __init__(self, repo_path, create=False, src_url=None,
 
                 update_after_clone=False, bare=False):
 

	
 
        self.path = abspath(repo_path)
 
        repo = self._get_repo(create, src_url, update_after_clone, bare)
 
        self.bare = repo.bare
 

	
 
        self._config_files = [
 
            bare and abspath(self.path, 'config')
 
                     or abspath(self.path, '.git', 'config'),
 
            abspath(get_user_home(), '.gitconfig'),
 
        ]
 

	
 
    @ThreadLocalLazyProperty
 
    def _repo(self):
 
        repo = Repo(self.path)
 
        # patch the instance of GitRepo with an "FAKE" ui object to add
 
        # compatibility layer with Mercurial
 
        if not hasattr(repo, 'ui'):
 
            from mercurial.ui import ui
 
            baseui = ui()
 
            setattr(repo, 'ui', baseui)
 
        return repo
 
 

	
 
    @property
 
    def head(self):
 
        try:
 
            return self._repo.head()
 
        except KeyError:
 
            return None
 

	
 
    @LazyProperty
 
    def revisions(self):
 
        """
 
        Returns list of revisions' ids, in ascending order.  Being lazy
 
        attribute allows external tools to inject shas from cache.
 
        """
 
        return self._get_all_revisions()
 

	
 
    @classmethod
 
    def _run_git_command(cls, cmd, **opts):
 
        """
 
        Runs given ``cmd`` as git command and returns tuple
 
        (stdout, stderr).
 

	
 
        :param cmd: git command to be executed
 
        :param opts: env options to pass into Subprocess command
 
        """
 

	
 
        if '_bare' in opts:
 
            _copts = []
 
            del opts['_bare']
 
        else:
 
            _copts = ['-c', 'core.quotepath=false', ]
 
        safe_call = False
 
        if '_safe' in opts:
 
            #no exc on failure
 
            del opts['_safe']
 
            safe_call = True
 

	
 
        _str_cmd = False
 
        if isinstance(cmd, basestring):
 
            cmd = [cmd]
 
            _str_cmd = True
 

	
 
        gitenv = os.environ
 
        # need to clean/fix GIT_DIR!
 
        if 'GIT_DIR' in gitenv:
 
            del gitenv['GIT_DIR']
 
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
 

	
 
        _git_path = rhodecode.CONFIG.get('git_path', 'git')
 
        cmd = [_git_path] + _copts + cmd
 
        if _str_cmd:
 
            cmd = ' '.join(cmd)
 
        try:
 
            _opts = dict(
 
                env=gitenv,
 
                shell=False,
 
            )
 
            _opts.update(opts)
 
            p = subprocessio.SubprocessIOChunker(cmd, **_opts)
 
        except (EnvironmentError, OSError), err:
 
            tb_err = ("Couldn't run git command (%s).\n"
 
                      "Original error was:%s\n" % (cmd, err))
 
            log.error(tb_err)
 
            if safe_call:
 
                return '', err
 
            else:
 
                raise RepositoryError(tb_err)
 

	
 
        return ''.join(p.output), ''.join(p.error)
 

	
 
    def run_git_command(self, cmd):
 
        opts = {}
 
        if os.path.isdir(self.path):
 
            opts['cwd'] = self.path
 
        return self._run_git_command(cmd, **opts)
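
    # For illustration, a sketch of running a raw git command against an
    # existing repository (the path is made up); string commands are the
    # pattern used elsewhere in this class:
    #
    #     repo = GitRepository('/srv/repos/somerepo.git')
    #     stdout, stderr = repo.run_git_command('log -1 --pretty=oneline')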
 

	
 
    @classmethod
 
    def _check_url(cls, url):
 
        """
 
        Function will check the given url and try to verify if it's a valid
 
        link. Sometimes it may happen that mercurial will issue a basic
 
        auth request that can cause the whole API to hang when used from python
 
        or other external calls.
 

	
 
        On failures it'll raise urllib2.HTTPError
 
        """
 
        from mercurial.util import url as Url
 

	
 
        # those auth handlers are patched for a python 2.6.5 bug causing
 
        # infinite looping when given invalid resources
 
        from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
 

	
 
        # check first if it's not an local url
 
        if os.path.isdir(url) or url.startswith('file:'):
 
            return True
 

	
 
        if('+' in url[:url.find('://')]):
 
            url = url[url.find('+') + 1:]
 

	
 
        handlers = []
 
        test_uri, authinfo = Url(url).authinfo()
 
        if not test_uri.endswith('info/refs'):
 
            test_uri = test_uri.rstrip('/') + '/info/refs'
 
        if authinfo:
 
            #create a password manager
 
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
 
            passmgr.add_password(*authinfo)
 

	
 
            handlers.extend((httpbasicauthhandler(passmgr),
 
                             httpdigestauthhandler(passmgr)))
 

	
 
        o = urllib2.build_opener(*handlers)
 
        o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git
 

	
 
        q = {"service": 'git-upload-pack'}
 
        qs = '?%s' % urllib.urlencode(q)
 
        cu = "%s%s" % (test_uri, qs)
 
        req = urllib2.Request(cu, None, {})
 

	
 
        try:
 
            resp = o.open(req)
 
            return resp.code == 200
 
        except Exception, e:
 
            # means it cannot be cloned
 
            raise urllib2.URLError("[%s] %s" % (url, e))
 

	
 
    def _get_repo(self, create, src_url=None, update_after_clone=False,
 
                  bare=False):
 
        if create and os.path.exists(self.path):
 
            raise RepositoryError("Location already exist")
 
        if src_url and not create:
 
            raise RepositoryError("Create should be set to True if src_url is "
 
                                  "given (clone operation creates repository)")
 
        try:
 
            if create and src_url:
 
                GitRepository._check_url(src_url)
 
                self.clone(src_url, update_after_clone, bare)
 
                return Repo(self.path)
 
            elif create:
 
                os.mkdir(self.path)
 
                if bare:
 
                    return Repo.init_bare(self.path)
 
                else:
 
                    return Repo.init(self.path)
 
            else:
 
                return self._repo
 
        except (NotGitRepository, OSError), err:
 
            raise RepositoryError(err)
 

	
 
    def _get_all_revisions(self):
 
        # we must check if this repo is not empty, since the later command
 
        # fails if it is. And it's cheaper to ask up front than to handle the
 
        # subprocess errors
 
        try:
 
            self._repo.head()
 
        except KeyError:
 
            return []
 
        rev_filter = rhodecode.CONFIG.get('git_rev_filter',
 
                                                      '--all').strip()
 
        cmd = 'rev-list %s --reverse --date-order' % (rev_filter)
 
        try:
 
            so, se = self.run_git_command(cmd)
 
        except RepositoryError:
 
            # Can be raised for empty repositories
 
            return []
 
        return so.splitlines()
 

	
 
    def _get_all_revisions2(self):
 
        #alternate implementation using dulwich
 
        includes = [x[1][0] for x in self._parsed_refs.iteritems()
 
                    if x[1][1] != 'T']
 
        return [c.commit.id for c in self._repo.get_walker(include=includes)]
 

	
 
    def _get_revision(self, revision):
 
        """
 
        For the git backend we always return an integer here. This way we ensure
 
        that the changeset's revision attribute becomes an integer.
 
        """
 
        pattern = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
 
        is_bstr = lambda o: isinstance(o, (str, unicode))
 
        is_null = lambda o: len(o) == revision.count('0')
 

	
 
        if len(self.revisions) == 0:
 
            raise EmptyRepositoryError("There are no changesets yet")
 

	
 
        if revision in (None, '', 'tip', 'HEAD', 'head', -1):
 
            revision = self.revisions[-1]
 

	
 
        if ((is_bstr(revision) and revision.isdigit() and len(revision) < 12)
 
            or isinstance(revision, int) or is_null(revision)):
 
            try:
 
                revision = self.revisions[int(revision)]
 
            except:
 
                raise ChangesetDoesNotExistError("Revision %s does not exist "
 
                    "for this repository" % (revision))
 

	
 
        elif is_bstr(revision):
 
            # get by branch/tag name
 
            _ref_revision = self._parsed_refs.get(revision)
 
            _tags_shas = self.tags.values()
 
            if _ref_revision:  # and _ref_revision[1] in ['H', 'RH', 'T']:
 
                return _ref_revision[0]
 

	
 
            # maybe it's a tag? we don't have tags in self.revisions
 
            elif revision in _tags_shas:
 
                return _tags_shas[_tags_shas.index(revision)]
 

	
 
            elif not pattern.match(revision) or revision not in self.revisions:
 
                raise ChangesetDoesNotExistError("Revision %s does not exist "
 
                    "for this repository" % (revision))
 

	
 
        # Ensure we return full id
 
        if not pattern.match(str(revision)):
 
            raise ChangesetDoesNotExistError("Given revision %s not recognized"
 
                % revision)
 
        return revision
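
A hedged sketch of the identifier forms this resolver accepts, assuming the backend class is obtained via get_backend (the repository path and branch name are placeholders):

# Illustrative only -- path and branch name are placeholders.
from rhodecode.lib.vcs import get_backend

repo = get_backend('git')('/srv/repos/example')
tip_sha = repo._get_revision('tip')        # newest revision
first_sha = repo._get_revision(0)          # by index into repo.revisions
same_sha = repo._get_revision(tip_sha)     # a full sha passes straight through
branch_sha = repo._get_revision('master')  # branch/tag names via _parsed_refs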
 

	
 
    def _get_archives(self, archive_name='tip'):
 

	
 
        for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
 
                yield {"type": i[0], "extension": i[1], "node": archive_name}
 

	
 
    def _get_url(self, url):
 
        """
 
        Returns normalized url. If no scheme is given, it falls back to the

        filesystem (``file:///``) scheme.
 
        """
 
        url = str(url)
 
        if url != 'default' and not '://' in url:
 
            url = ':///'.join(('file', url))
 
        return url
 

	
 
    def get_hook_location(self):
 
        """
 
        returns absolute path to location where hooks are stored
 
        """
 
        loc = os.path.join(self.path, 'hooks')
 
        if not self.bare:
 
            loc = os.path.join(self.path, '.git', 'hooks')
 
        return loc
 

	
 
    @LazyProperty
 
    def name(self):
 
        return os.path.basename(self.path)
 

	
 
    @LazyProperty
 
    def last_change(self):
 
        """
 
        Returns last change made on this repository as datetime object
 
        """
 
        return date_fromtimestamp(self._get_mtime(), makedate()[1])
 

	
 
    def _get_mtime(self):
 
        try:
 
            return time.mktime(self.get_changeset().date.timetuple())
 
        except RepositoryError:
 
            idx_loc = '' if self.bare else '.git'
 
            # fallback to filesystem
 
            in_path = os.path.join(self.path, idx_loc, "index")
 
            he_path = os.path.join(self.path, idx_loc, "HEAD")
 
            if os.path.exists(in_path):
 
                return os.stat(in_path).st_mtime
 
            else:
 
                return os.stat(he_path).st_mtime
 

	
 
    @LazyProperty
 
    def description(self):
 
        idx_loc = '' if self.bare else '.git'
 
        undefined_description = u'unknown'
 
        description_path = os.path.join(self.path, idx_loc, 'description')
 
        if os.path.isfile(description_path):
 
            return safe_unicode(open(description_path).read())
 
        else:
 
            return undefined_description
 

	
 
    @LazyProperty
 
    def contact(self):
 
        undefined_contact = u'Unknown'
 
        return undefined_contact
 

	
 
    @property
 
    def branches(self):
 
        if not self.revisions:
 
            return {}
 
        sortkey = lambda ctx: ctx[0]
 
        _branches = [(x[0], x[1][0])
 
                     for x in self._parsed_refs.iteritems() if x[1][1] == 'H']
 
        return OrderedDict(sorted(_branches, key=sortkey, reverse=False))
 

	
 
    @LazyProperty
 
    def tags(self):
 
        return self._get_tags()
 

	
 
    def _get_tags(self):
 
        if not self.revisions:
 
            return {}
 

	
 
        sortkey = lambda ctx: ctx[0]
 
        _tags = [(x[0], x[1][0])
 
                 for x in self._parsed_refs.iteritems() if x[1][1] == 'T']
 
        return OrderedDict(sorted(_tags, key=sortkey, reverse=True))
 

	
 
    def tag(self, name, user, revision=None, message=None, date=None,
 
            **kwargs):
 
        """
 
        Creates and returns a tag for the given ``revision``.
 

	
 
        :param name: name for new tag
 
        :param user: full username, e.g. "Joe Doe <joe.doe@example.com>"
 
        :param revision: changeset id for which new tag would be created
 
        :param message: message of the tag's commit
 
        :param date: date of tag's commit
 

	
 
        :raises TagAlreadyExistError: if tag with same name already exists
 
        """
 
        if name in self.tags:
 
            raise TagAlreadyExistError("Tag %s already exists" % name)
 
        changeset = self.get_changeset(revision)
 
        message = message or "Added tag %s for commit %s" % (name,
 
            changeset.raw_id)
 
        self._repo.refs["refs/tags/%s" % name] = changeset._commit.id
 

	
 
        self._parsed_refs = self._get_parsed_refs()
 
        self.tags = self._get_tags()
 
        return changeset
 

	
 
    def remove_tag(self, name, user, message=None, date=None):
 
        """
 
        Removes tag with the given ``name``.
 

	
 
        :param name: name of the tag to be removed
 
        :param user: full username, e.g. "Joe Doe <joe.doe@example.com>"
 
        :param message: message of the tag's removal commit
 
        :param date: date of tag's removal commit
 

	
 
        :raises TagDoesNotExistError: if tag with given name does not exist
 
        """
 
        if name not in self.tags:
 
            raise TagDoesNotExistError("Tag %s does not exist" % name)
 
        tagpath = posixpath.join(self._repo.refs.path, 'refs', 'tags', name)
 
        try:
 
            os.remove(tagpath)
 
            self._parsed_refs = self._get_parsed_refs()
 
            self.tags = self._get_tags()
 
        except OSError, e:
 
            raise RepositoryError(e.strerror)
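
A short, hedged sketch of the tag round-trip implemented above (repository path, tag name and user string are placeholders):

# Illustrative only -- all values below are placeholders.
from rhodecode.lib.vcs import get_backend

repo = get_backend('git')('/srv/repos/example')
user = 'Joe Doe <joe.doe@example.com>'
repo.tag('example-tag', user)            # tags the tip when revision is None
assert 'example-tag' in repo.tags
repo.remove_tag('example-tag', user)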
 

	
 
    @LazyProperty
 
    def _parsed_refs(self):
 
        return self._get_parsed_refs()
 

	
 
    def _get_parsed_refs(self):
 
        refs = self._repo.get_refs()
 
        keys = [('refs/heads/', 'H'),
 
                ('refs/remotes/origin/', 'RH'),
 
                ('refs/tags/', 'T')]
 
        _refs = {}
 
        for ref, sha in refs.iteritems():
 
            for k, type_ in keys:
 
                if ref.startswith(k):
 
                    _key = ref[len(k):]
 
                    if type_ == 'T':
 
                        obj = self._repo.get_object(sha)
 
                        if isinstance(obj, Tag):
 
                            sha = self._repo.get_object(sha).object[1]
 
                    _refs[_key] = [sha, type_]
 
                    break
 
        return _refs
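
For reference, the mapping built here keys short ref names to [sha, type] pairs; a hypothetical example of its shape:

# Hypothetical shape of the parsed refs mapping (shas shortened for clarity):
parsed_refs_example = {
    'master':  ['a1b2c3...', 'H'],   # local branch head
    'feature': ['d4e5f6...', 'RH'],  # remote head under refs/remotes/origin/
    'v1.0':    ['0789ab...', 'T'],   # tag, peeled to the tagged commit's sha
}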
 

	
 
    def _heads(self, reverse=False):
 
        refs = self._repo.get_refs()
 
        heads = {}
 

	
 
        for key, val in refs.items():
 
            for ref_key in ['refs/heads/', 'refs/remotes/origin/']:
 
                if key.startswith(ref_key):
 
                    n = key[len(ref_key):]
 
                    if n not in ['HEAD']:
 
                        heads[n] = val
 

	
 
        return heads if reverse else dict((y, x) for x, y in heads.iteritems())
 

	
 
    def get_changeset(self, revision=None):
 
        """
 
        Returns ``GitChangeset`` object representing commit from git repository
 
        at the given revision or head (most recent commit) if None given.
 
        """
 
        if isinstance(revision, GitChangeset):
 
            return revision
 
        revision = self._get_revision(revision)
 
        changeset = GitChangeset(repository=self, revision=revision)
 
        return changeset
 

	
 
    def get_changesets(self, start=None, end=None, start_date=None,
 
           end_date=None, branch_name=None, reverse=False):
 
        """
 
        Returns iterator of ``GitChangeset`` objects from start to end (both
 
        are inclusive), in ascending date order (unless ``reverse`` is set).
 

	
 
        :param start: changeset ID, as str; first returned changeset
 
        :param end: changeset ID, as str; last returned changeset
 
        :param start_date: if specified, changesets with commit date less than
 
          ``start_date`` would be filtered out from returned set
 
        :param end_date: if specified, changesets with commit date greater than
 
          ``end_date`` would be filtered out from returned set
 
        :param branch_name: if specified, changesets not reachable from given
 
          branch would be filtered out from returned set
 
        :param reverse: if ``True``, returned generator would be reversed
 
          (meaning that returned changesets would have descending date order)
 

	
 
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
 
            exist.
 
        :raise ChangesetDoesNotExistError: If changeset for given ``start`` or
 
          ``end`` could not be found.
 

	
 
        """
 
        if branch_name and branch_name not in self.branches:
 
            raise BranchDoesNotExistError("Branch '%s' not found" \
 
                                          % branch_name)
 
        # %H in the format string means the full commit hash; hashes are

        # retrieved in ascending date order
 
        cmd_template = 'log --date-order --reverse --pretty=format:"%H"'
 
        cmd_params = {}
 
        if start_date:
 
            cmd_template += ' --since "$since"'
 
            cmd_params['since'] = start_date.strftime('%m/%d/%y %H:%M:%S')
 
        if end_date:
 
            cmd_template += ' --until "$until"'
 
            cmd_params['until'] = end_date.strftime('%m/%d/%y %H:%M:%S')
 
        if branch_name:
 
            cmd_template += ' $branch_name'
 
            cmd_params['branch_name'] = branch_name
 
        else:
 
            rev_filter = rhodecode.CONFIG.get('git_rev_filter', '--all').strip()
 
            cmd_template += ' %s' % (rev_filter)
 

	
 
        cmd = Template(cmd_template).safe_substitute(**cmd_params)
 
        revs = self.run_git_command(cmd)[0].splitlines()
 
        start_pos = 0
 
        end_pos = len(revs)
 
        if start:
 
            _start = self._get_revision(start)
 
            try:
 
                start_pos = revs.index(_start)
 
            except ValueError:
 
                pass
 

	
 
        if end is not None:
 
            _end = self._get_revision(end)
 
            try:
 
                end_pos = revs.index(_end)
 
            except ValueError:
 
                pass
 

	
 
        if None not in [start, end] and start_pos > end_pos:
 
            raise RepositoryError('start cannot be after end')
 

	
 
        if end_pos is not None:
 
            end_pos += 1
 

	
 
        revs = revs[start_pos:end_pos]
 
        if reverse:
 
            revs = reversed(revs)
 
        for rev in revs:
 
            yield self.get_changeset(rev)
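
A hedged usage sketch for the iterator above (repository path and date are placeholders):

# Illustrative only -- path and date are placeholders.
import datetime
from rhodecode.lib.vcs import get_backend

repo = get_backend('git')('/srv/repos/example')
since = datetime.datetime(2013, 1, 1)
for cs in repo.get_changesets(start_date=since, reverse=True):
    print('%s %s' % (cs.raw_id, cs.date))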
 

	
 
    def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
 
                 context=3):
 
        """
 
        Returns (git like) *diff*, as plain text. Shows changes introduced by
 
        ``rev2`` since ``rev1``.
 

	
 
        :param rev1: Entry point from which diff is shown. Can be
 
          ``self.EMPTY_CHANGESET`` - in this case the patch shows all

          changes from the empty repository state up to ``rev2``
 
        :param rev2: Until which revision changes should be shown.
 
        :param ignore_whitespace: If set to ``True``, would not show whitespace
 
          changes. Defaults to ``False``.
 
        :param context: How many lines before/after changed lines should be
 
          shown. Defaults to ``3``.
 
        """
 
        flags = ['-U%s' % context, '--full-index', '--binary', '-p', '-M', '--abbrev=40']
 
        if ignore_whitespace:
 
            flags.append('-w')
 

	
 
        if hasattr(rev1, 'raw_id'):
 
            rev1 = getattr(rev1, 'raw_id')
 

	
 
        if hasattr(rev2, 'raw_id'):
 
            rev2 = getattr(rev2, 'raw_id')
 

	
 
        if rev1 == self.EMPTY_CHANGESET:
 
            rev2 = self.get_changeset(rev2).raw_id
 
            cmd = ' '.join(['show'] + flags + [rev2])
 
        else:
 
            rev1 = self.get_changeset(rev1).raw_id
 
            rev2 = self.get_changeset(rev2).raw_id
 
            cmd = ' '.join(['diff'] + flags + [rev1, rev2])
 

	
 
        if path:
 
            cmd += ' -- "%s"' % path
 

	
 
        stdout, stderr = self.run_git_command(cmd)
 
        # If we used 'show' command, strip first few lines (until actual diff
 
        # starts)
 
        if rev1 == self.EMPTY_CHANGESET:
 
            lines = stdout.splitlines()
 
            x = 0
 
            for line in lines:
 
                if line.startswith('diff'):
 
                    break
 
                x += 1
 
            # Append a newline just like the 'diff' command does
 
            stdout = '\n'.join(lines[x:]) + '\n'
 
        return stdout
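
A hedged sketch of producing diffs with the method above (repository path and revision choices are placeholders):

# Illustrative only -- path and revisions are placeholders.
from rhodecode.lib.vcs import get_backend

repo = get_backend('git')('/srv/repos/example')
rev1, rev2 = repo.revisions[0], repo.revisions[-1]
patch = repo.get_diff(rev1, rev2, ignore_whitespace=True, context=5)
# Everything introduced up to rev2, starting from the empty repository:
full_patch = repo.get_diff(repo.EMPTY_CHANGESET, rev2)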
 

	
 
    @LazyProperty
 
    def in_memory_changeset(self):
 
        """
 
        Returns ``GitInMemoryChangeset`` object for this repository.
 
        """
 
        return GitInMemoryChangeset(self)
 

	
 
    def clone(self, url, update_after_clone=True, bare=False):
 
        """
 
        Tries to clone changes from external location.
 

	
 
        :param update_after_clone: If set to ``False``, git won't checkout
 
          working directory
 
        :param bare: If set to ``True``, repository would be cloned into
 
          *bare* git repository (no working directory at all).
 
        """
 
        url = self._get_url(url)
 
        cmd = ['clone']
 
        if bare:
 
            cmd.append('--bare')
 
        elif not update_after_clone:
 
            cmd.append('--no-checkout')
 
        cmd += ['--', '"%s"' % url, '"%s"' % self.path]
 
        cmd = ' '.join(cmd)
 
        # If an error occurs, run_git_command raises RepositoryError already
 
        self.run_git_command(cmd)
 

	
 
    def pull(self, url):
 
        """
 
        Tries to pull changes from external location.
 
        """
 
        url = self._get_url(url)
 
        cmd = ['pull']
 
        cmd.append("--ff-only")
 
        cmd.append(url)
 
        cmd = ' '.join(cmd)
 
        # If an error occurs, run_git_command raises RepositoryError already
 
        self.run_git_command(cmd)
 

	
 
    def fetch(self, url):
 
        """
 
        Tries to fetch changes from external location.
 
        """
 
        url = self._get_url(url)
 
        so, se = self.run_git_command('ls-remote -h %s' % url)
 
        refs = []
 
        for line in so.splitlines():
 
            sha, ref = line.split('\t')
 
            refs.append(ref)
 
        refs = ' '.join(('+%s:%s' % (r, r) for r in refs))
 
        cmd = '''fetch %s -- %s''' % (url, refs)
 
        self.run_git_command(cmd)
 

	
 
    @LazyProperty
 
    def workdir(self):
 
        """
 
        Returns ``Workdir`` instance for this repository.
 
        """
 
        return GitWorkdir(self)
 

	
 
    def get_config_value(self, section, name, config_file=None):
 
        """
 
        Returns configuration value for a given [``section``] and ``name``.
 

	
 
        :param section: Section we want to retrieve value from
 
        :param name: Name of configuration we want to retrieve
 
        :param config_file: A path to file which should be used to retrieve
 
          configuration from (might also be a list of file paths)
 
        """
 
        if config_file is None:
 
            config_file = []
 
        elif isinstance(config_file, basestring):
 
            config_file = [config_file]
 

	
 
        def gen_configs():
 
            for path in config_file + self._config_files:
 
                try:
 
                    yield ConfigFile.from_path(path)
 
                except (IOError, OSError, ValueError):
 
                    continue
 

	
 
        for config in gen_configs():
 
            try:
 
                return config.get(section, name)
 
            except KeyError:
 
                continue
 
        return None
 

	
 
    def get_user_name(self, config_file=None):
 
        """
 
        Returns user's name from global configuration file.
 

	
 
        :param config_file: A path to file which should be used to retrieve
 
          configuration from (might also be a list of file paths)
 
        """
 
        return self.get_config_value('user', 'name', config_file)
 

	
 
    def get_user_email(self, config_file=None):
 
        """
 
        Returns user's email from global configuration file.
 

	
 
        :param config_file: A path to file which should be used to retrieve
 
          configuration from (might also be a list of file paths)
 
        """
 
        return self.get_config_value('user', 'email', config_file)
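
A hedged sketch of reading values through the lookup chain above; an explicitly passed config file is consulted before the repository's own config files (paths are placeholders):

# Illustrative only -- paths are placeholders.
from rhodecode.lib.vcs import get_backend

repo = get_backend('git')('/srv/repos/example')
print(repo.get_config_value('core', 'bare'))
print(repo.get_user_name('/home/someuser/.gitconfig'))
print(repo.get_user_email())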
rhodecode/model/db.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.model.db
 
    ~~~~~~~~~~~~~~~~~~
 

	
 
    Database Models for RhodeCode
 

	
 
    :created_on: Apr 08, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import os
 
import logging
 
import datetime
 
import traceback
 
import hashlib
 
import time
 
from collections import defaultdict
 

	
 
from sqlalchemy import *
 
from sqlalchemy.ext.hybrid import hybrid_property
 
from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
 
from sqlalchemy.exc import DatabaseError
 
from beaker.cache import cache_region, region_invalidate
 
from webob.exc import HTTPNotFound
 

	
 
from pylons.i18n.translation import lazy_ugettext as _
 

	
 
from rhodecode.lib.vcs import get_backend
 
from rhodecode.lib.vcs.utils.helpers import get_scm
 
from rhodecode.lib.vcs.exceptions import VCSError
 
from rhodecode.lib.vcs.utils.lazy import LazyProperty
 
from rhodecode.lib.vcs.backends.base import EmptyChangeset
 

	
 
from rhodecode.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
 
    safe_unicode, remove_suffix, remove_prefix, time_to_datetime
 
    safe_unicode, remove_suffix, remove_prefix, time_to_datetime, _set_extras
 
from rhodecode.lib.compat import json
 
from rhodecode.lib.caching_query import FromCache
 

	
 
from rhodecode.model.meta import Base, Session
 

	
 
URL_SEP = '/'
 
log = logging.getLogger(__name__)
 

	
 
#==============================================================================
 
# BASE CLASSES
 
#==============================================================================
 

	
 
_hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()
 

	
 

	
 
class BaseModel(object):
 
    """
 
    Base Model for all classes
 
    """
 

	
 
    @classmethod
 
    def _get_keys(cls):
 
        """return column names for this model """
 
        return class_mapper(cls).c.keys()
 

	
 
    def get_dict(self):
 
        """
 
        return dict with keys and values corresponding
 
        to this model's data """
 

	
 
        d = {}
 
        for k in self._get_keys():
 
            d[k] = getattr(self, k)
 

	
 
        # also use __json__() if present to get additional fields
 
        _json_attr = getattr(self, '__json__', None)
 
        if _json_attr:
 
            # update with attributes from __json__
 
            if callable(_json_attr):
 
                _json_attr = _json_attr()
 
            for k, val in _json_attr.iteritems():
 
                d[k] = val
 
        return d
 

	
 
    def get_appstruct(self):
 
        """return list with keys and values tupples corresponding
 
        to this model data """
 

	
 
        l = []
 
        for k in self._get_keys():
 
            l.append((k, getattr(self, k),))
 
        return l
 

	
 
    def populate_obj(self, populate_dict):
 
        """populate model with data from given populate_dict"""
 

	
 
        for k in self._get_keys():
 
            if k in populate_dict:
 
                setattr(self, k, populate_dict[k])
 

	
 
    @classmethod
 
    def query(cls):
 
        return Session().query(cls)
 

	
 
    @classmethod
 
    def get(cls, id_):
 
        if id_:
 
            return cls.query().get(id_)
 

	
 
    @classmethod
 
    def get_or_404(cls, id_):
 
        try:
 
            id_ = int(id_)
 
        except (TypeError, ValueError):
 
            raise HTTPNotFound
 

	
 
        res = cls.query().get(id_)
 
        if not res:
 
            raise HTTPNotFound
 
        return res
 

	
 
    @classmethod
 
    def getAll(cls):
 
        return cls.query().all()
 

	
 
    @classmethod
 
    def delete(cls, id_):
 
        obj = cls.query().get(id_)
 
        Session().delete(obj)
 

	
 
    def __repr__(self):
 
        if hasattr(self, '__unicode__'):
 
            # python repr needs to return str
 
            return safe_str(self.__unicode__())
 
        return '<DB:%s>' % (self.__class__.__name__)
 

	
 

	
 
class RhodeCodeSetting(Base, BaseModel):
 
    __tablename__ = 'rhodecode_settings'
 
    __table_args__ = (
 
        UniqueConstraint('app_settings_name'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    app_settings_name = Column("app_settings_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    _app_settings_value = Column("app_settings_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 

	
 
    def __init__(self, k='', v=''):
 
        self.app_settings_name = k
 
        self.app_settings_value = v
 

	
 
    @validates('_app_settings_value')
 
    def validate_settings_value(self, key, val):
 
        assert type(val) == unicode
 
        return val
 

	
 
    @hybrid_property
 
    def app_settings_value(self):
 
        v = self._app_settings_value
 
        if self.app_settings_name in ["ldap_active",
 
                                      "default_repo_enable_statistics",
 
                                      "default_repo_enable_locking",
 
                                      "default_repo_private",
 
                                      "default_repo_enable_downloads"]:
 
            v = str2bool(v)
 
        return v
 

	
 
    @app_settings_value.setter
 
    def app_settings_value(self, val):
 
        """
 
        Setter that will always make sure we use unicode in app_settings_value
 

	
 
        :param val:
 
        """
 
        self._app_settings_value = safe_unicode(val)
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (
 
            self.__class__.__name__,
 
            self.app_settings_name, self.app_settings_value
 
        )
 

	
 
    @classmethod
 
    def get_by_name(cls, key):
 
        return cls.query()\
 
            .filter(cls.app_settings_name == key).scalar()
 

	
 
    @classmethod
 
    def get_by_name_or_create(cls, key):
 
        res = cls.get_by_name(key)
 
        if not res:
 
            res = cls(key)
 
        return res
 

	
 
    @classmethod
 
    def get_app_settings(cls, cache=False):
 

	
 
        ret = cls.query()
 

	
 
        if cache:
 
            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
 

	
 
        if not ret:
 
            raise Exception('Could not get application settings !')
 
        settings = {}
 
        for each in ret:
 
            settings['rhodecode_' + each.app_settings_name] = \
 
                each.app_settings_value
 

	
 
        return settings
 

	
 
    @classmethod
 
    def get_ldap_settings(cls, cache=False):
 
        ret = cls.query()\
 
                .filter(cls.app_settings_name.startswith('ldap_')).all()
 
        fd = {}
 
        for row in ret:
 
            fd.update({row.app_settings_name: row.app_settings_value})
 

	
 
        return fd
 

	
 
    @classmethod
 
    def get_default_repo_settings(cls, cache=False, strip_prefix=False):
 
        ret = cls.query()\
 
                .filter(cls.app_settings_name.startswith('default_')).all()
 
        fd = {}
 
        for row in ret:
 
            key = row.app_settings_name
 
            if strip_prefix:
 
                key = remove_prefix(key, prefix='default_')
 
            fd.update({key: row.app_settings_value})
 

	
 
        return fd
 

	
 

	
 
class RhodeCodeUi(Base, BaseModel):
 
    __tablename__ = 'rhodecode_ui'
 
    __table_args__ = (
 
        UniqueConstraint('ui_key'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 

	
 
    HOOK_UPDATE = 'changegroup.update'
 
    HOOK_REPO_SIZE = 'changegroup.repo_size'
 
    HOOK_PUSH = 'changegroup.push_logger'
 
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
 
    HOOK_PULL = 'outgoing.pull_logger'
 
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
 

	
 
    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    ui_section = Column("ui_section", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    ui_key = Column("ui_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    ui_value = Column("ui_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
 

	
 
    @classmethod
 
    def get_by_key(cls, key):
 
        return cls.query().filter(cls.ui_key == key).scalar()
 

	
 
    @classmethod
 
    def get_builtin_hooks(cls):
 
        q = cls.query()
 
        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
 
                                     cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
 
                                     cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
 
        return q.all()
 

	
 
    @classmethod
 
    def get_custom_hooks(cls):
 
        q = cls.query()
 
        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
 
                                      cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
 
                                      cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
 
        q = q.filter(cls.ui_section == 'hooks')
 
        return q.all()
 

	
 
    @classmethod
 
    def get_repos_location(cls):
 
        return cls.get_by_key('/').ui_value
 

	
 
    @classmethod
 
    def create_or_update_hook(cls, key, val):
 
        new_ui = cls.get_by_key(key) or cls()
 
        new_ui.ui_section = 'hooks'
 
        new_ui.ui_active = True
 
        new_ui.ui_key = key
 
        new_ui.ui_value = val
 

	
 
        Session().add(new_ui)
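
A hedged sketch of registering one of the built-in hooks through this helper (the dotted callable path is a placeholder, not taken from this changeset):

# Illustrative only -- the hook value below is a placeholder.
from rhodecode.model.db import RhodeCodeUi
from rhodecode.model.meta import Session

RhodeCodeUi.create_or_update_hook(RhodeCodeUi.HOOK_PUSH,
                                  'python:rhodecode.lib.hooks.log_push_action')
Session().commit()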
 

	
 
    def __repr__(self):
 
        return '<DB:%s[%s:%s]>' % (self.__class__.__name__, self.ui_key,
 
                                   self.ui_value)
 

	
 

	
 
class User(Base, BaseModel):
 
    __tablename__ = 'users'
 
    __table_args__ = (
 
        UniqueConstraint('username'), UniqueConstraint('email'),
 
        Index('u_username_idx', 'username'),
 
        Index('u_email_idx', 'email'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    DEFAULT_USER = 'default'
 
    DEFAULT_PERMISSIONS = [
 
        'hg.register.manual_activate', 'hg.create.repository',
 
        'hg.fork.repository', 'repository.read', 'group.read'
 
    ]
 
    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    password = Column("password", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
 
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
 
    name = Column("firstname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    lastname = Column("lastname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
 
    ldap_dn = Column("ldap_dn", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    api_key = Column("api_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
 

	
 
    user_log = relationship('UserLog')
 
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
 

	
 
    repositories = relationship('Repository')
 
    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
 
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
 

	
 
    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
 
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
 

	
 
    group_member = relationship('UserGroupMember', cascade='all')
 

	
 
    notifications = relationship('UserNotification', cascade='all')
 
    # notifications assigned to this user
 
    user_created_notifications = relationship('Notification', cascade='all')
 
    # comments created by this user
 
    user_comments = relationship('ChangesetComment', cascade='all')
 
    #extra emails for this user
 
    user_emails = relationship('UserEmailMap', cascade='all')
 

	
 
    @hybrid_property
 
    def email(self):
 
        return self._email
 

	
 
    @email.setter
 
    def email(self, val):
 
        self._email = val.lower() if val else None
 

	
 
    @property
 
    def firstname(self):
 
        # alias for future
 
        return self.name
 

	
 
    @property
 
    def emails(self):
 
        other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
 
        return [self.email] + [x.email for x in other]
 

	
 
    @property
 
    def ip_addresses(self):
 
        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
 
        return [x.ip_addr for x in ret]
 

	
 
    @property
 
    def username_and_name(self):
 
        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
 

	
 
    @property
 
    def full_name(self):
 
        return '%s %s' % (self.firstname, self.lastname)
 

	
 
    @property
 
    def full_name_or_username(self):
 
        return ('%s %s' % (self.firstname, self.lastname)
 
                if (self.firstname and self.lastname) else self.username)
 

	
 
    @property
 
    def full_contact(self):
 
        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
 

	
 
    @property
 
    def short_contact(self):
 
        return '%s %s' % (self.firstname, self.lastname)
 

	
 
    @property
 
    def is_admin(self):
 
        return self.admin
 

	
 
    @property
 
    def AuthUser(self):
 
        """
 
        Returns instance of AuthUser for this user
 
        """
 
        from rhodecode.lib.auth import AuthUser
 
        return AuthUser(user_id=self.user_id, api_key=self.api_key,
 
                        username=self.username)
 

	
 
    def __unicode__(self):
 
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
 
                                     self.user_id, self.username)
 

	
 
    @classmethod
 
    def get_by_username(cls, username, case_insensitive=False, cache=False):
 
        if case_insensitive:
 
            q = cls.query().filter(cls.username.ilike(username))
 
        else:
 
            q = cls.query().filter(cls.username == username)
 

	
 
        if cache:
 
            q = q.options(FromCache(
 
                            "sql_cache_short",
 
                            "get_user_%s" % _hash_key(username)
 
                          )
 
            )
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_api_key(cls, api_key, cache=False):
 
        q = cls.query().filter(cls.api_key == api_key)
 

	
 
        if cache:
 
            q = q.options(FromCache("sql_cache_short",
 
                                    "get_api_key_%s" % api_key))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_email(cls, email, case_insensitive=False, cache=False):
 
        if case_insensitive:
 
            q = cls.query().filter(cls.email.ilike(email))
 
        else:
 
            q = cls.query().filter(cls.email == email)
 

	
 
        if cache:
 
            q = q.options(FromCache("sql_cache_short",
 
                                    "get_email_key_%s" % email))
 

	
 
        ret = q.scalar()
 
        if ret is None:
 
            q = UserEmailMap.query()
 
            # try fetching in alternate email map
 
            if case_insensitive:
 
                q = q.filter(UserEmailMap.email.ilike(email))
 
            else:
 
                q = q.filter(UserEmailMap.email == email)
 
            q = q.options(joinedload(UserEmailMap.user))
 
            if cache:
 
                q = q.options(FromCache("sql_cache_short",
 
                                        "get_email_map_key_%s" % email))
 
            ret = getattr(q.scalar(), 'user', None)
 

	
 
        return ret
 

	
 
    @classmethod
 
    def get_from_cs_author(cls, author):
 
        """
 
        Tries to get User objects out of commit author string
 

	
 
        :param author:
 
        """
 
        from rhodecode.lib.helpers import email, author_name
 
        # If a valid email is in the passed attribute, see if it's in the system
 
        _email = email(author)
 
        if _email:
 
            user = cls.get_by_email(_email, case_insensitive=True)
 
            if user:
 
                return user
 
        # Maybe we can match by username?
 
        _author = author_name(author)
 
        user = cls.get_by_username(_author, case_insensitive=True)
 
        if user:
 
            return user
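
A hedged sketch of mapping a commit author string back to a RhodeCode account with the classmethod above (the author string is a placeholder):

# Illustrative only -- the author string is a placeholder.
from rhodecode.model.db import User

user = User.get_from_cs_author('Joe Doe <joe.doe@example.com>')
if user is None:
    print('no RhodeCode account matches this commit author')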
 

	
 
    def update_lastlogin(self):
 
        """Update user lastlogin"""
 
        self.last_login = datetime.datetime.now()
 
        Session().add(self)
 
        log.debug('updated user %s lastlogin' % self.username)
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating user related data for API
 
        """
 
        user = self
 
        data = dict(
 
            user_id=user.user_id,
 
            username=user.username,
 
            firstname=user.name,
 
            lastname=user.lastname,
 
            email=user.email,
 
            emails=user.emails,
 
            api_key=user.api_key,
 
            active=user.active,
 
            admin=user.admin,
 
            ldap_dn=user.ldap_dn,
 
            last_login=user.last_login,
 
            ip_addresses=user.ip_addresses
 
        )
 
        return data
 

	
 
    def __json__(self):
 
        data = dict(
 
            full_name=self.full_name,
 
            full_name_or_username=self.full_name_or_username,
 
            short_contact=self.short_contact,
 
            full_contact=self.full_contact
 
        )
 
        data.update(self.get_api_data())
 
        return data
 

	
 

	
 
class UserEmailMap(Base, BaseModel):
 
    __tablename__ = 'user_email_map'
 
    __table_args__ = (
 
        Index('uem_email_idx', 'email'),
 
        UniqueConstraint('email'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    __mapper_args__ = {}
 

	
 
    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
 
    user = relationship('User', lazy='joined')
 

	
 
    @validates('_email')
 
    def validate_email(self, key, email):
 
        # check that this email is not the main one
 
        main_email = Session().query(User).filter(User.email == email).scalar()
 
        if main_email is not None:
 
            raise AttributeError('email %s is present in user table' % email)
 
        return email
 

	
 
    @hybrid_property
 
    def email(self):
 
        return self._email
 

	
 
    @email.setter
 
    def email(self, val):
 
        self._email = val.lower() if val else None
 

	
 

	
 
class UserIpMap(Base, BaseModel):
 
    __tablename__ = 'user_ip_map'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'ip_addr'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    __mapper_args__ = {}
 

	
 
    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    ip_addr = Column("ip_addr", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
 
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
 
    user = relationship('User', lazy='joined')
 

	
 
    @classmethod
 
    def _get_ip_range(cls, ip_addr):
 
        from rhodecode.lib import ipaddr
 
        net = ipaddr.IPNetwork(address=ip_addr)
 
        return [str(net.network), str(net.broadcast)]
 

	
 
    def __json__(self):
 
        return dict(
 
          ip_addr=self.ip_addr,
 
          ip_range=self._get_ip_range(self.ip_addr)
 
        )
 

	
 

	
 
class UserLog(Base, BaseModel):
 
    __tablename__ = 'user_logs'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
 
    repository_name = Column("repository_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    user_ip = Column("user_ip", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    action = Column("action", UnicodeText(1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
 

	
 
    @property
 
    def action_as_day(self):
 
        return datetime.date(*self.action_date.timetuple()[:3])
 

	
 
    user = relationship('User')
 
    repository = relationship('Repository', cascade='')
 

	
 

	
 
class UserGroup(Base, BaseModel):
 
    __tablename__ = 'users_groups'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 

	
 
    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_name = Column("users_group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
 
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
 
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
 

	
 
    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
 
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
 
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
 

	
 
    def __unicode__(self):
 
        return u'<userGroup(%s)>' % (self.users_group_name)
 

	
 
    @classmethod
 
    def get_by_group_name(cls, group_name, cache=False,
 
                          case_insensitive=False):
 
        if case_insensitive:
 
            q = cls.query().filter(cls.users_group_name.ilike(group_name))
 
        else:
 
            q = cls.query().filter(cls.users_group_name == group_name)
 
        if cache:
 
            q = q.options(FromCache(
 
                            "sql_cache_short",
 
                            "get_user_%s" % _hash_key(group_name)
 
                          )
 
            )
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get(cls, users_group_id, cache=False):
 
        users_group = cls.query()
 
        if cache:
 
            users_group = users_group.options(FromCache("sql_cache_short",
 
                                    "get_users_group_%s" % users_group_id))
 
        return users_group.get(users_group_id)
 

	
 
    def get_api_data(self):
 
        users_group = self
 

	
 
        data = dict(
 
            users_group_id=users_group.users_group_id,
 
            group_name=users_group.users_group_name,
 
            active=users_group.users_group_active,
 
        )
 

	
 
        return data
 

	
 

	
 
class UserGroupMember(Base, BaseModel):
 
    __tablename__ = 'users_groups_members'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 

	
 
    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User', lazy='joined')
 
    users_group = relationship('UserGroup')
 

	
 
    def __init__(self, gr_id='', u_id=''):
 
        self.users_group_id = gr_id
 
        self.user_id = u_id
 

	
 

	
 
class RepositoryField(Base, BaseModel):
 
    __tablename__ = 'repositories_fields'
 
    __table_args__ = (
 
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields
 

	
 
    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
 
    field_key = Column("field_key", String(250, convert_unicode=False, assert_unicode=None))
 
    field_label = Column("field_label", String(1024, convert_unicode=False, assert_unicode=None), nullable=False)
 
    field_value = Column("field_value", String(10000, convert_unicode=False, assert_unicode=None), nullable=False)
 
    field_desc = Column("field_desc", String(1024, convert_unicode=False, assert_unicode=None), nullable=False)
 
    field_type = Column("field_type", String(256), nullable=False, unique=None)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    repository = relationship('Repository')
 

	
 
    @property
 
    def field_key_prefixed(self):
 
        return 'ex_%s' % self.field_key
 

	
 
    @classmethod
 
    def un_prefix_key(cls, key):
 
        if key.startswith(cls.PREFIX):
 
            return key[len(cls.PREFIX):]
 
        return key
 

	
 
    @classmethod
 
    def get_by_key_name(cls, key, repo):
 
        row = cls.query()\
 
                .filter(cls.repository == repo)\
 
                .filter(cls.field_key == key).scalar()
 
        return row
 

	
 

	
 
class Repository(Base, BaseModel):
 
    __tablename__ = 'repositories'
 
    __table_args__ = (
 
        UniqueConstraint('repo_name'),
 
        Index('r_repo_name_idx', 'repo_name'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 

	
 
    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    repo_name = Column("repo_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
 
    clone_uri = Column("clone_uri", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
 
    repo_type = Column("repo_type", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
 
    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
 
    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
 
    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
 
    description = Column("description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
 
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
 
    landing_rev = Column("landing_revision", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
 
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
 
    _locked = Column("locked", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
 
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) #JSON data
 

	
 
    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
 
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
 

	
 
    user = relationship('User')
 
    fork = relationship('Repository', remote_side=repo_id)
 
    group = relationship('RepoGroup')
 
    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
 
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
 
    stats = relationship('Statistics', cascade='all', uselist=False)
 

	
 
    followers = relationship('UserFollowing',
 
                             primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
 
                             cascade='all')
 
    extra_fields = relationship('RepositoryField',
 
                                cascade="all, delete, delete-orphan")
 

	
 
    logs = relationship('UserLog')
 
    comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan")
 

	
 
    pull_requests_org = relationship('PullRequest',
 
                    primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
 
                    cascade="all, delete, delete-orphan")
 

	
 
    pull_requests_other = relationship('PullRequest',
 
                    primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
 
                    cascade="all, delete, delete-orphan")
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
 
                                   self.repo_name)
 

	
 
    @hybrid_property
 
    def locked(self):
 
        # always should return [user_id, timelocked]
 
        if self._locked:
 
            _lock_info = self._locked.split(':')
 
            return int(_lock_info[0]), _lock_info[1]
 
        return [None, None]
 

	
 
    @locked.setter
 
    def locked(self, val):
 
        if val and isinstance(val, (list, tuple)):
 
            self._locked = ':'.join(map(str, val))
 
        else:
 
            self._locked = None
 

	
 
    @hybrid_property
 
    def changeset_cache(self):
 
        from rhodecode.lib.vcs.backends.base import EmptyChangeset
 
        dummy = EmptyChangeset().__json__()
 
        if not self._changeset_cache:
 
            return dummy
 
        try:
 
            return json.loads(self._changeset_cache)
 
        except TypeError:
 
            return dummy
 

	
 
    @changeset_cache.setter
 
    def changeset_cache(self, val):
 
        try:
 
            self._changeset_cache = json.dumps(val)
 
        except:
 
            log.error(traceback.format_exc())
 

	
 
    @classmethod
 
    def url_sep(cls):
 
        return URL_SEP
 

	
 
    @classmethod
 
    def normalize_repo_name(cls, repo_name):
 
        """
 
        Normalizes os specific repo_name to the format internally stored inside
 
        dabatabase using URL_SEP
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        return cls.url_sep().join(repo_name.split(os.sep))
 

	
 
    @classmethod
 
    def get_by_repo_name(cls, repo_name):
 
        q = Session().query(cls).filter(cls.repo_name == repo_name)
 
        q = q.options(joinedload(Repository.fork))\
 
                .options(joinedload(Repository.user))\
 
                .options(joinedload(Repository.group))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_full_path(cls, repo_full_path):
 
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
 
        repo_name = cls.normalize_repo_name(repo_name)
 
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
 

	
 
    @classmethod
 
    def get_repo_forks(cls, repo_id):
 
        return cls.query().filter(Repository.fork_id == repo_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns the base path where all repos are stored
 

	
 
        :param cls:
 
        """
 
        q = Session().query(RhodeCodeUi)\
 
            .filter(RhodeCodeUi.ui_key == cls.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def forks(self):
 
        """
 
        Return forks of this repo
 
        """
 
        return Repository.get_repo_forks(self.repo_id)
 

	
 
    @property
 
    def parent(self):
 
        """
 
        Returns fork parent
 
        """
 
        return self.fork
 

	
 
    @property
 
    def just_name(self):
 
        return self.repo_name.split(Repository.url_sep())[-1]
 

	
 
    @property
 
    def groups_with_parents(self):
 
        groups = []
 
        if self.group is None:
 
            return groups
 

	
 
        cur_gr = self.group
 
        groups.insert(0, cur_gr)
 
        while 1:
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            groups.insert(0, gr)
 

	
 
        return groups
 

	
 
    @property
 
    def groups_and_repo(self):
 
        return self.groups_with_parents, self.just_name
 

	
 
    @LazyProperty
 
    def repo_path(self):
 
        """
 
        Returns the full base path for this repository, i.e. where it actually

        exists on the filesystem
 
        """
 
        q = Session().query(RhodeCodeUi).filter(RhodeCodeUi.ui_key ==
 
                                              Repository.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def repo_full_path(self):
 
        p = [self.repo_path]
 
        # we need to split the name by / since this is how we store the
 
        # names in the database, but that eventually needs to be converted
 
        # into a valid system path
 
        p += self.repo_name.split(Repository.url_sep())
 
        return os.path.join(*p)
 

	
 
    @property
 
    def cache_keys(self):
 
        """
 
        Returns associated cache keys for that repo
 
        """
 
        return CacheInvalidation.query()\
 
            .filter(CacheInvalidation.cache_args == self.repo_name)\
 
            .order_by(CacheInvalidation.cache_key)\
 
            .all()
 

	
 
    def get_new_name(self, repo_name):
 
        """
 
        returns the new full repository name based on assigned group and repo_name
 

	
 
        :param repo_name: new name of the repository (without its group prefix)
 
        """
 
        path_prefix = self.group.full_path_splitted if self.group else []
 
        return Repository.url_sep().join(path_prefix + [repo_name])
 

	
 
    @property
 
    def _ui(self):
 
        """
 
        Creates a db-based ui object for this repository
 
        """
 
        from rhodecode.lib.utils import make_ui
 
        return make_ui('db', clear_session=False)
 

	
 
    @classmethod
 
    def inject_ui(cls, repo, extras={}):
 
        repo.inject_ui(extras)
 

	
 
    @classmethod
 
    def is_valid(cls, repo_name):
 
        """
 
        returns True if given repo name is a valid filesystem repository
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        from rhodecode.lib.utils import is_valid_repo
 

	
 
        return is_valid_repo(repo_name, cls.base_path())
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating repo api data
 

	
 
        """
 
        repo = self
 
        data = dict(
 
            repo_id=repo.repo_id,
 
            repo_name=repo.repo_name,
 
            repo_type=repo.repo_type,
 
            clone_uri=repo.clone_uri,
 
            private=repo.private,
 
            created_on=repo.created_on,
 
            description=repo.description,
 
            landing_rev=repo.landing_rev,
 
            owner=repo.user.username,
 
            fork_of=repo.fork.repo_name if repo.fork else None,
 
            enable_statistics=repo.enable_statistics,
 
            enable_locking=repo.enable_locking,
 
            enable_downloads=repo.enable_downloads,
 
            last_changeset=repo.changeset_cache,
 
            locked_by=User.get(self.locked[0]).get_api_data() \
 
                if self.locked[0] else None,
 
            locked_date=time_to_datetime(self.locked[1]) \
 
                if self.locked[1] else None
 
        )
 
        rc_config = RhodeCodeSetting.get_app_settings()
 
        repository_fields = str2bool(rc_config.get('rhodecode_repository_fields'))
 
        if repository_fields:
 
            for f in self.extra_fields:
 
                data[f.field_key_prefixed] = f.field_value
 

	
 
        return data
 

	
 
    @classmethod
 
    def lock(cls, repo, user_id):
 
        repo.locked = [user_id, time.time()]
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def unlock(cls, repo):
 
        repo.locked = None
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def getlock(cls, repo):
 
        return repo.locked
 

	
 
    @property
 
    def last_db_change(self):
 
        return self.updated_on
 

	
 
    def clone_url(self, **override):
 
        from pylons import url
 
        from urlparse import urlparse
 
        import urllib
 
        parsed_url = urlparse(url('home', qualified=True))
 
        default_clone_uri = '%(scheme)s://%(user)s%(pass)s%(netloc)s%(prefix)s%(path)s'
 
        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
 
        args = {
 
           'user': '',
 
           'pass': '',
 
           'scheme': parsed_url.scheme,
 
           'netloc': parsed_url.netloc,
 
           'prefix': decoded_path,
 
           'path': self.repo_name
 
        }
 

	
 
        args.update(override)
 
        return default_clone_uri % args
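 
    # Illustrative usage (hypothetical values): with the instance answering at
 
    # https://code.example.com, clone_url() renders roughly
 
    # 'https://code.example.com/web/projects/backend'; overrides such as
 
    # user='bob@' are spliced in verbatim; separators are not added for you.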
 

	
 
    #==========================================================================
 
    # SCM PROPERTIES
 
    #==========================================================================
 

	
 
    def get_changeset(self, rev=None):
 
        return get_changeset_safe(self.scm_instance, rev)
 

	
 
    def get_landing_changeset(self):
 
        """
 
        Returns landing changeset, or if that doesn't exist returns the tip
 
        """
 
        cs = self.get_changeset(self.landing_rev) or self.get_changeset()
 
        return cs
 

	
 
    def update_changeset_cache(self, cs_cache=None):
 
        """
 
        Update cache of last changeset for repository, keys should be::
 

	
 
            short_id
 
            raw_id
 
            revision
 
            message
 
            date
 
            author
 

	
 
        :param cs_cache:
 
        """
 
        from rhodecode.lib.vcs.backends.base import BaseChangeset
 
        if cs_cache is None:
 
            cs_cache = EmptyChangeset()
 
            # use no-cache version here
 
            scm_repo = self.scm_instance_no_cache()
 
            if scm_repo:
 
                cs_cache = scm_repo.get_changeset()
 

	
 
        if isinstance(cs_cache, BaseChangeset):
 
            cs_cache = cs_cache.__json__()
 

	
 
        if (cs_cache != self.changeset_cache or not self.changeset_cache):
 
            _default = datetime.datetime.fromtimestamp(0)
 
            last_change = cs_cache.get('date') or _default
 
            log.debug('updated repo %s with new cs cache %s' % (self, cs_cache))
 
            self.updated_on = last_change
 
            self.changeset_cache = cs_cache
 
            Session().add(self)
 
            Session().commit()
 
        else:
 
            log.debug('Skipping repo:%s already with latest changes' % self)
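 
    # Illustrative cs_cache payload (hypothetical values), matching the keys
 
    # listed in the docstring above:
 
    # {'short_id': 'abc123def456', 'raw_id': 'abc123def456...', 'revision': 42,
 
    #  'message': 'fix login bug', 'date': <datetime>, 'author': 'marcink'}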
 

	
 
    @property
 
    def tip(self):
 
        return self.get_changeset('tip')
 

	
 
    @property
 
    def author(self):
 
        return self.tip.author
 

	
 
    @property
 
    def last_change(self):
 
        return self.scm_instance.last_change
 

	
 
    def get_comments(self, revisions=None):
 
        """
 
        Returns comments for this repository grouped by revisions
 

	
 
        :param revisions: filter query by revisions only
 
        """
 
        cmts = ChangesetComment.query()\
 
            .filter(ChangesetComment.repo == self)
 
        if revisions:
 
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
 
        grouped = defaultdict(list)
 
        for cmt in cmts.all():
 
            grouped[cmt.revision].append(cmt)
 
        return grouped
 

	
 
    def statuses(self, revisions=None):
 
        """
 
        Returns statuses for this repository
 

	
 
        :param revisions: list of revisions to get statuses for
 
        :type revisions: list
 
        """
 

	
 
        statuses = ChangesetStatus.query()\
 
            .filter(ChangesetStatus.repo == self)\
 
            .filter(ChangesetStatus.version == 0)
 
        if revisions:
 
            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
 
        grouped = {}
 

	
 
        # maybe we have an open new pull request without a status?
 
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
 
        status_lbl = ChangesetStatus.get_status_lbl(stat)
 
        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
 
            for rev in pr.revisions:
 
                pr_id = pr.pull_request_id
 
                pr_repo = pr.other_repo.repo_name
 
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
 

	
 
        for stat in statuses.all():
 
            pr_id = pr_repo = None
 
            if stat.pull_request:
 
                pr_id = stat.pull_request.pull_request_id
 
                pr_repo = stat.pull_request.other_repo.repo_name
 
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
 
                                      pr_id, pr_repo]
 
        return grouped
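 
    # Illustrative result shape (hypothetical revisions); each value is
 
    # [status, status label, pull request id, other repo name]:
 
    # {'deadbeef...': ['under_review', 'Under Review', 7, 'joe/fork'],
 
    #  'cafebabe...': ['approved', 'Approved', None, None]}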
 

	
 
    def _repo_size(self):
 
        from rhodecode.lib import helpers as h
 
        log.debug('calculating repository size...')
 
        return h.format_byte_size(self.scm_instance.size)
 

	
 
    #==========================================================================
 
    # SCM CACHE INSTANCE
 
    #==========================================================================
 

	
 
    @property
 
    def invalidate(self):
 
        return CacheInvalidation.invalidate(self.repo_name)
 

	
 
    def set_invalidate(self):
 
        """
 
        Sets a cache invalidation marker for this instance
 
        """
 
        CacheInvalidation.set_invalidate(repo_name=self.repo_name)
 

	
 
    def scm_instance_no_cache(self):
 
        return self.__get_instance()
 

	
 
    @LazyProperty
 
    def scm_instance(self):
 
        import rhodecode
 
        full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
 
        if full_cache:
 
            return self.scm_instance_cached()
 
        return self.__get_instance()
 

	
 
    def scm_instance_cached(self, cache_map=None):
 
        @cache_region('long_term')
 
        def _c(repo_name):
 
            return self.__get_instance()
 
        rn = self.repo_name
 
        log.debug('Getting cached instance of repo')
 

	
 
        if cache_map:
 
            # get using prefilled cache_map
 
            invalidate_repo = cache_map[self.repo_name]
 
            if invalidate_repo:
 
                invalidate_repo = (None if invalidate_repo.cache_active
 
                                   else invalidate_repo)
 
        else:
 
            # get from invalidate
 
            invalidate_repo = self.invalidate
 

	
 
        if invalidate_repo is not None:
 
            region_invalidate(_c, None, rn)
 
            # update our cache
 
            CacheInvalidation.set_valid(invalidate_repo.cache_key)
 
        return _c(rn)
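 
    # Rough flow: a stale CacheInvalidation row (cache_active == False) makes
 
    # region_invalidate() drop the beaker 'long_term' entry for this repo name,
 
    # so _c(rn) rebuilds the instance and the key is marked valid again.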
 

	
 
    def __get_instance(self):
 
        repo_full_path = self.repo_full_path
 
        try:
 
            alias = get_scm(repo_full_path)[0]
 
            log.debug('Creating instance of %s repository from %s'
 
                      % (alias, repo_full_path))
 
            backend = get_backend(alias)
 
        except VCSError:
 
            log.error(traceback.format_exc())
 
            log.error('Perhaps this repository is in the db and not on the '
 
                      'filesystem; run "rescan repositories" with the '
 
                      '"destroy old data " option from admin panel')
 
            return
 

	
 
        if alias == 'hg':
 

	
 
            repo = backend(safe_str(repo_full_path), create=False,
 
                           baseui=self._ui)
 
            # skip hidden web repository
 
            if repo._get_hidden():
 
                return
 
        else:
 
            repo = backend(repo_full_path, create=False)
 

	
 
        return repo
 

	
 

	
 
class RepoGroup(Base, BaseModel):
 
    __tablename__ = 'groups'
 
    __table_args__ = (
 
        UniqueConstraint('group_name', 'group_parent_id'),
 
        CheckConstraint('group_id != group_parent_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    __mapper_args__ = {'order_by': 'group_name'}
 

	
 
    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    group_name = Column("group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
 
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
 
    group_description = Column("group_description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
 

	
 
    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
 
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
 

	
 
    parent_group = relationship('RepoGroup', remote_side=group_id)
 

	
 
    def __init__(self, group_name='', parent_group=None):
 
        self.group_name = group_name
 
        self.parent_group = parent_group
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
 
                                  self.group_name)
 

	
 
    @classmethod
 
    def groups_choices(cls, groups=None, show_empty_group=True):
 
        from webhelpers.html import literal as _literal
 
        if not groups:
 
            groups = cls.query().all()
 

	
 
        repo_groups = []
 
        if show_empty_group:
 
            repo_groups = [('-1', '-- %s --' % _('top level'))]
 
        sep = ' &raquo; '
 
        _name = lambda k: _literal(sep.join(k))
 

	
 
        repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
 
                              for x in groups])
 

	
 
        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
 
        return repo_groups
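 
    # Illustrative output (hypothetical groups), sorted by top-level name:
 
    # [('-1', '-- top level --'), (3, u'docs'), (7, u'web &raquo; projects')]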
 

	
 
    @classmethod
 
    def url_sep(cls):
 
        return URL_SEP
 

	
 
    @classmethod
 
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
 
        if case_insensitive:
 
            gr = cls.query()\
 
                .filter(cls.group_name.ilike(group_name))
 
        else:
 
            gr = cls.query()\
 
                .filter(cls.group_name == group_name)
 
        if cache:
 
            gr = gr.options(FromCache(
 
                            "sql_cache_short",
 
                            "get_group_%s" % _hash_key(group_name)
 
                            )
 
            )
 
        return gr.scalar()
 

	
 
    @property
 
    def parents(self):
 
        parents_recursion_limit = 5
 
        groups = []
 
        if self.parent_group is None:
 
            return groups
 
        cur_gr = self.parent_group
 
        groups.insert(0, cur_gr)
 
        cnt = 0
 
        while 1:
 
            cnt += 1
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            if cnt == parents_recursion_limit:
 
                # this will prevent accidental infinite loops
 
                log.error('group nested more than %s' %
 
                          parents_recursion_limit)
 
                break
 

	
 
            groups.insert(0, gr)
 
        return groups
 

	
 
    @property
 
    def children(self):
 
        return RepoGroup.query().filter(RepoGroup.parent_group == self)
 

	
 
    @property
 
    def name(self):
 
        return self.group_name.split(RepoGroup.url_sep())[-1]
 

	
 
    @property
 
    def full_path(self):
 
        return self.group_name
 

	
 
    @property
 
    def full_path_splitted(self):
 
        return self.group_name.split(RepoGroup.url_sep())
 

	
 
    @property
 
    def repositories(self):
 
        return Repository.query()\
 
                .filter(Repository.group == self)\
 
                .order_by(Repository.repo_name)
 

	
 
    @property
 
    def repositories_recursive_count(self):
 
        cnt = self.repositories.count()
 

	
 
        def children_count(group):
 
            cnt = 0
 
            for child in group.children:
 
                cnt += child.repositories.count()
 
                cnt += children_count(child)
 
            return cnt
 

	
 
        return cnt + children_count(self)
 

	
 
    def _recursive_objects(self, include_repos=True):
 
        all_ = []
 

	
 
        def _get_members(root_gr):
 
            if include_repos:
 
                for r in root_gr.repositories:
 
                    all_.append(r)
 
            childs = root_gr.children.all()
 
            if childs:
 
                for gr in childs:
 
                    all_.append(gr)
 
                    _get_members(gr)
 

	
 
        _get_members(self)
 
        return [self] + all_
 

	
 
    def recursive_groups_and_repos(self):
 
        """
 
        Recursively returns all groups, with the repositories inside those groups
 
        """
 
        return self._recursive_objects()
 

	
 
    def recursive_groups(self):
 
        """
 
        Returns all child groups of this group, including children of children
 
        """
 
        return self._recursive_objects(include_repos=False)
 

	
 
    def get_new_name(self, group_name):
 
        """
 
        returns new full group name based on parent and new name
 

	
 
        :param group_name:
 
        """
 
        path_prefix = (self.parent_group.full_path_splitted if
 
                       self.parent_group else [])
 
        return RepoGroup.url_sep().join(path_prefix + [group_name])
 

	
 

	
 
class Permission(Base, BaseModel):
 
    __tablename__ = 'permissions'
 
    __table_args__ = (
 
        Index('p_perm_name_idx', 'permission_name'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    PERMS = [
 
        ('repository.none', _('Repository no access')),
 
        ('repository.read', _('Repository read access')),
 
        ('repository.write', _('Repository write access')),
 
        ('repository.admin', _('Repository admin access')),
 

	
 
        ('group.none', _('Repository group no access')),
 
        ('group.read', _('Repository group read access')),
 
        ('group.write', _('Repository group write access')),
 
        ('group.admin', _('Repository group admin access')),
 

	
 
        ('hg.admin', _('RhodeCode Administrator')),
 
        ('hg.create.none', _('Repository creation disabled')),
 
        ('hg.create.repository', _('Repository creation enabled')),
 
        ('hg.fork.none', _('Repository forking disabled')),
 
        ('hg.fork.repository', _('Repository forking enabled')),
 
        ('hg.register.none', _('Register disabled')),
 
        ('hg.register.manual_activate', _('Register new user with RhodeCode '
 
                                          'with manual activation')),
 

	
 
        ('hg.register.auto_activate', _('Register new user with RhodeCode '
 
                                        'with auto activation')),
 
    ]
 

	
 
    # defines which permissions are more important; the higher the weight, the more important the permission
 
    PERM_WEIGHTS = {
 
        'repository.none': 0,
 
        'repository.read': 1,
 
        'repository.write': 3,
 
        'repository.admin': 4,
 

	
 
        'group.none': 0,
 
        'group.read': 1,
 
        'group.write': 3,
 
        'group.admin': 4,
 

	
 
        'hg.fork.none': 0,
 
        'hg.fork.repository': 1,
 
        'hg.create.none': 0,
 
        'hg.create.repository':1
 
    }
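 
    # Illustrative comparison using the weights above:
 
    # max(['repository.read', 'repository.write'], key=PERM_WEIGHTS.get)
 
    # evaluates to 'repository.write' (weight 3 beats weight 1).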
 

	
 
    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    permission_name = Column("permission_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    permission_longname = Column("permission_longname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (
 
            self.__class__.__name__, self.permission_id, self.permission_name
 
        )
 

	
 
    @classmethod
 
    def get_by_key(cls, key):
 
        return cls.query().filter(cls.permission_name == key).scalar()
 

	
 
    @classmethod
 
    def get_default_perms(cls, default_user_id):
 
        q = Session().query(UserRepoToPerm, Repository, cls)\
 
         .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
 
         .join((cls, UserRepoToPerm.permission_id == cls.permission_id))\
 
         .filter(UserRepoToPerm.user_id == default_user_id)
 

	
 
        return q.all()
 

	
 
    @classmethod
 
    def get_default_group_perms(cls, default_user_id):
 
        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls)\
 
         .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
 
         .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id))\
 
         .filter(UserRepoGroupToPerm.user_id == default_user_id)
 

	
 
        return q.all()
 

	
 

	
 
class UserRepoToPerm(Base, BaseModel):
 
    __tablename__ = 'repo_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User')
 
    repository = relationship('Repository')
 
    permission = relationship('Permission')
 

	
 
    @classmethod
 
    def create(cls, user, repository, permission):
 
        n = cls()
 
        n.user = user
 
        n.repository = repository
 
        n.permission = permission
 
        Session().add(n)
 
        return n
 

	
 
    def __unicode__(self):
 
        return u'<user:%s => %s >' % (self.user, self.repository)
 

	
 

	
 
class UserToPerm(Base, BaseModel):
 
    __tablename__ = 'user_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'permission_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User')
 
    permission = relationship('Permission', lazy='joined')
 

	
 

	
 
class UserGroupRepoToPerm(Base, BaseModel):
 
    __tablename__ = 'users_group_repo_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
 

	
 
    users_group = relationship('UserGroup')
 
    permission = relationship('Permission')
 
    repository = relationship('Repository')
 

	
 
    @classmethod
 
    def create(cls, users_group, repository, permission):
 
        n = cls()
 
        n.users_group = users_group
 
        n.repository = repository
 
        n.permission = permission
 
        Session().add(n)
 
        return n
 

	
 
    def __unicode__(self):
 
        return u'<userGroup:%s => %s >' % (self.users_group, self.repository)
 

	
 

	
 
class UserGroupToPerm(Base, BaseModel):
 
    __tablename__ = 'users_group_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('users_group_id', 'permission_id',),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 

	
 
    users_group = relationship('UserGroup')
 
    permission = relationship('Permission')
 

	
 

	
 
class UserRepoGroupToPerm(Base, BaseModel):
 
    __tablename__ = 'user_repo_group_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 

	
 
    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User')
 
    group = relationship('RepoGroup')
 
    permission = relationship('Permission')
 

	
 

	
 
class UserGroupRepoGroupToPerm(Base, BaseModel):
 
    __tablename__ = 'users_group_repo_group_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('users_group_id', 'group_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 

	
 
    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 

	
 
    users_group = relationship('UserGroup')
 
    permission = relationship('Permission')
 
    group = relationship('RepoGroup')
 

	
 

	
 
class Statistics(Base, BaseModel):
 
    __tablename__ = 'statistics'
 
    __table_args__ = (
 
         UniqueConstraint('repository_id'),
 
         {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
          'mysql_charset': 'utf8'}
 
    )
 
    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
 
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
 
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
 
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
 
    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
 

	
 
    repository = relationship('Repository', single_parent=True)
 

	
 

	
 
class UserFollowing(Base, BaseModel):
 
    __tablename__ = 'user_followings'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'follows_repository_id'),
 
        UniqueConstraint('user_id', 'follows_user_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 

	
 
    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
 
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
 

	
 
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
 

	
 
    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
 
    follows_repository = relationship('Repository', order_by='Repository.repo_name')
 

	
 
    @classmethod
 
    def get_repo_followers(cls, repo_id):
 
        return cls.query().filter(cls.follows_repo_id == repo_id)
 

	
 

	
 
class CacheInvalidation(Base, BaseModel):
 
    __tablename__ = 'cache_invalidation'
 
    __table_args__ = (
 
        UniqueConstraint('cache_key'),
 
        Index('key_idx', 'cache_key'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    cache_key = Column("cache_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    cache_args = Column("cache_args", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
 

	
 
    def __init__(self, cache_key, cache_args=''):
 
        self.cache_key = cache_key
 
        self.cache_args = cache_args
 
        self.cache_active = False
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (self.__class__.__name__,
 
                                  self.cache_id, self.cache_key)
 

	
 
    @property
 
    def prefix(self):
 
        _split = self.cache_key.split(self.cache_args, 1)
 
        if _split and len(_split) == 2:
 
            return _split[0]
 
        return ''
 

	
 
    @classmethod
 
    def clear_cache(cls):
 
        cls.query().delete()
 

	
 
    @classmethod
 
    def _get_key(cls, key):
 
        """
 
        Wrapper for generating a key, together with a prefix
 

	
 
        :param key:
 
        """
 
        import rhodecode
 
        prefix = ''
 
        org_key = key
 
        iid = rhodecode.CONFIG.get('instance_id')
 
        if iid:
 
            prefix = iid
 

	
 
        return "%s%s" % (prefix, key), prefix, org_key
 

	
 
    @classmethod
 
    def get_by_key(cls, key):
 
        return cls.query().filter(cls.cache_key == key).scalar()
 

	
 
    @classmethod
 
    def get_by_repo_name(cls, repo_name):
 
        return cls.query().filter(cls.cache_args == repo_name).all()
 

	
 
    @classmethod
 
    def _get_or_create_key(cls, key, repo_name, commit=True):
 
        inv_obj = Session().query(cls).filter(cls.cache_key == key).scalar()
 
        if not inv_obj:
 
            try:
 
                inv_obj = CacheInvalidation(key, repo_name)
 
                Session().add(inv_obj)
 
                if commit:
 
                    Session().commit()
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                Session().rollback()
 
        return inv_obj
 

	
 
    @classmethod
 
    def invalidate(cls, key):
 
        """
 
        Returns an Invalidation object if the given key should be invalidated,
 
        None otherwise. `cache_active = False` means that this cache
 
        state is not valid and needs to be invalidated
 

	
 
        :param key:
 
        """
 
        repo_name = key
 
        repo_name = remove_suffix(repo_name, '_README')
 
        repo_name = remove_suffix(repo_name, '_RSS')
 
        repo_name = remove_suffix(repo_name, '_ATOM')
 

	
 
        # adds instance prefix
 
        key, _prefix, _org_key = cls._get_key(key)
 
        inv = cls._get_or_create_key(key, repo_name)
 

	
 
        if inv and inv.cache_active is False:
 
            return inv
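 
    # Illustrative flow (hypothetical key): invalidate('repo1_README') derives
 
    # the repo name by stripping the '_README'/'_RSS'/'_ATOM' suffix, prefixes
 
    # the full key with the instance id, creates the row if it is missing, and
 
    # returns it only while cache_active is False.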
 

	
 
    @classmethod
 
    def set_invalidate(cls, key=None, repo_name=None):
 
        """
 
        Mark this cache key for invalidation, either by key or for whole
 
        cache sets based on repo_name
 

	
 
        :param key:
 
        """
 
        invalidated_keys = []
 
        if key:
 
            key, _prefix, _org_key = cls._get_key(key)
 
            inv_objs = Session().query(cls).filter(cls.cache_key == key).all()
 
        elif repo_name:
 
            inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
 

	
 
        try:
 
            for inv_obj in inv_objs:
 
                inv_obj.cache_active = False
 
                log.debug('marking %s key for invalidation based on key=%s,repo_name=%s'
 
                  % (inv_obj, key, safe_str(repo_name)))
 
                invalidated_keys.append(inv_obj.cache_key)
 
                Session().add(inv_obj)
 
            Session().commit()
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            Session().rollback()
 
        return invalidated_keys
 

	
 
    @classmethod
 
    def set_valid(cls, key):
 
        """
 
        Mark this cache key as active and currently cached
 

	
 
        :param key:
 
        """
 
        inv_obj = cls.get_by_key(key)
 
        inv_obj.cache_active = True
 
        Session().add(inv_obj)
 
        Session().commit()
 

	
 
    @classmethod
 
    def get_cache_map(cls):
 

	
 
        class cachemapdict(dict):
 

	
 
            def __init__(self, *args, **kwargs):
 
                fixkey = kwargs.get('fixkey')
 
                if fixkey:
 
                    del kwargs['fixkey']
 
                self.fixkey = fixkey
 
                super(cachemapdict, self).__init__(*args, **kwargs)
 

	
 
            def __getattr__(self, name):
 
                key = name
 
                if self.fixkey:
 
                    key, _prefix, _org_key = cls._get_key(key)
 
                if key in self.__dict__:
 
                    return self.__dict__[key]
 
                else:
 
                    return self[key]
 

	
 
            def __getitem__(self, key):
 
                if self.fixkey:
 
                    key, _prefix, _org_key = cls._get_key(key)
 
                try:
 
                    return super(cachemapdict, self).__getitem__(key)
 
                except KeyError:
 
                    return
 

	
 
        cache_map = cachemapdict(fixkey=True)
 
        for obj in cls.query().all():
 
            cache_map[obj.cache_key] = cachemapdict(obj.get_dict())
 
        return cache_map
 

	
 

	
 
class ChangesetComment(Base, BaseModel):
 
    __tablename__ = 'changeset_comments'
 
    __table_args__ = (
 
        Index('cc_revision_idx', 'revision'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
 
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    revision = Column('revision', String(40), nullable=True)
 
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
 
    line_no = Column('line_no', Unicode(10), nullable=True)
 
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
 
    f_path = Column('f_path', Unicode(1000), nullable=True)
 
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
 
    text = Column('text', UnicodeText(25000), nullable=False)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    author = relationship('User', lazy='joined')
 
    repo = relationship('Repository')
 
    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
 
    pull_request = relationship('PullRequest', lazy='joined')
 

	
 
    @classmethod
 
    def get_users(cls, revision=None, pull_request_id=None):
 
        """
 
        Returns users associated with this ChangesetComment, i.e. those
 
        who actually commented
 

	
 
        :param cls:
 
        :param revision:
 
        """
 
        q = Session().query(User)\
 
                .join(ChangesetComment.author)
 
        if revision:
 
            q = q.filter(cls.revision == revision)
 
        elif pull_request_id:
 
            q = q.filter(cls.pull_request_id == pull_request_id)
 
        return q.all()
 

	
 

	
 
class ChangesetStatus(Base, BaseModel):
 
    __tablename__ = 'changeset_statuses'
 
    __table_args__ = (
 
        Index('cs_revision_idx', 'revision'),
 
        Index('cs_version_idx', 'version'),
 
        UniqueConstraint('repo_id', 'revision', 'version'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
 
    STATUS_APPROVED = 'approved'
 
    STATUS_REJECTED = 'rejected'
 
    STATUS_UNDER_REVIEW = 'under_review'
 

	
 
    STATUSES = [
 
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
 
        (STATUS_APPROVED, _("Approved")),
 
        (STATUS_REJECTED, _("Rejected")),
 
        (STATUS_UNDER_REVIEW, _("Under Review")),
 
    ]
 

	
 
    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
 
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
 
    revision = Column('revision', String(40), nullable=False)
 
    status = Column('status', String(128), nullable=False, default=DEFAULT)
 
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
 
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
 
    version = Column('version', Integer(), nullable=False, default=0)
 
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
 

	
 
    author = relationship('User', lazy='joined')
 
    repo = relationship('Repository')
 
    comment = relationship('ChangesetComment', lazy='joined')
 
    pull_request = relationship('PullRequest', lazy='joined')
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (
 
            self.__class__.__name__,
 
            self.status, self.author
 
        )
 

	
 
    @classmethod
 
    def get_status_lbl(cls, value):
 
        return dict(cls.STATUSES).get(value)
 

	
 
    @property
 
    def status_lbl(self):
 
        return ChangesetStatus.get_status_lbl(self.status)
 

	
 

	
 
class PullRequest(Base, BaseModel):
 
    __tablename__ = 'pull_requests'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 

	
 
    STATUS_NEW = u'new'
 
    STATUS_OPEN = u'open'
 
    STATUS_CLOSED = u'closed'
 

	
 
    pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True)
 
    title = Column('title', Unicode(256), nullable=True)
 
    description = Column('description', UnicodeText(10240), nullable=True)
 
    status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
 
    _revisions = Column('revisions', UnicodeText(20500))  # 500 revisions max
 
    org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    org_ref = Column('org_ref', Unicode(256), nullable=False)
 
    other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    other_ref = Column('other_ref', Unicode(256), nullable=False)
 

	
 
    @hybrid_property
 
    def revisions(self):
 
        return self._revisions.split(':')
 

	
 
    @revisions.setter
 
    def revisions(self, val):
 
        self._revisions = ':'.join(val)
 

	
 
    @property
 
    def org_ref_parts(self):
 
        return self.org_ref.split(':')
 

	
 
    @property
 
    def other_ref_parts(self):
 
        return self.other_ref.split(':')
 

	
 
    author = relationship('User', lazy='joined')
 
    reviewers = relationship('PullRequestReviewers',
 
                             cascade="all, delete, delete-orphan")
 
    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
 
    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
 
    statuses = relationship('ChangesetStatus')
 
    comments = relationship('ChangesetComment',
 
                             cascade="all, delete, delete-orphan")
 

	
 
    def is_closed(self):
 
        return self.status == self.STATUS_CLOSED
 

	
 
    @property
 
    def last_review_status(self):
 
        return self.statuses[-1].status if self.statuses else ''
 

	
 
    def __json__(self):
 
        return dict(
 
          revisions=self.revisions
 
        )
 

	
 

	
 
class PullRequestReviewers(Base, BaseModel):
 
    __tablename__ = 'pull_request_reviewers'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 

	
 
    def __init__(self, user=None, pull_request=None):
 
        self.user = user
 
        self.pull_request = pull_request
 

	
 
    pull_requests_reviewers_id = Column('pull_requests_reviewers_id', Integer(), nullable=False, primary_key=True)
 
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
 

	
 
    user = relationship('User')
 
    pull_request = relationship('PullRequest')
 

	
 

	
 
class Notification(Base, BaseModel):
 
    __tablename__ = 'notifications'
 
    __table_args__ = (
 
        Index('notification_type_idx', 'type'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 

	
 
    TYPE_CHANGESET_COMMENT = u'cs_comment'
 
    TYPE_MESSAGE = u'message'
 
    TYPE_MENTION = u'mention'
 
    TYPE_REGISTRATION = u'registration'
 
    TYPE_PULL_REQUEST = u'pull_request'
 
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
 

	
 
    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
 
    subject = Column('subject', Unicode(512), nullable=True)
 
    body = Column('body', UnicodeText(50000), nullable=True)
 
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    type_ = Column('type', Unicode(256))
 

	
 
    created_by_user = relationship('User')
 
    notifications_to_users = relationship('UserNotification', lazy='joined',
 
                                          cascade="all, delete, delete-orphan")
 

	
 
    @property
 
    def recipients(self):
 
        return [x.user for x in UserNotification.query()\
 
                .filter(UserNotification.notification == self)\
 
                .order_by(UserNotification.user_id.asc()).all()]
 

	
 
    @classmethod
 
    def create(cls, created_by, subject, body, recipients, type_=None):
 
        if type_ is None:
 
            type_ = Notification.TYPE_MESSAGE
 

	
 
        notification = cls()
 
        notification.created_by_user = created_by
 
        notification.subject = subject
 
        notification.body = body
 
        notification.type_ = type_
 
        notification.created_on = datetime.datetime.now()
 

	
 
        for u in recipients:
 
            assoc = UserNotification()
 
            assoc.notification = notification
 
            u.notifications.append(assoc)
 
        Session().add(notification)
 
        return notification
 

	
 
    @property
 
    def description(self):
 
        from rhodecode.model.notification import NotificationModel
 
        return NotificationModel().make_description(self)
 

	
 

	
 
class UserNotification(Base, BaseModel):
 
    __tablename__ = 'user_to_notification'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'notification_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
 
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
 
    read = Column('read', Boolean, default=False)
 
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
 

	
 
    user = relationship('User', lazy="joined")
 
    notification = relationship('Notification', lazy="joined",
 
                                order_by=lambda: Notification.created_on.desc(),)
 

	
 
    def mark_as_read(self):
 
        self.read = True
 
        Session().add(self)
 

	
 

	
 
class DbMigrateVersion(Base, BaseModel):
 
    __tablename__ = 'db_migrate_version'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    repository_id = Column('repository_id', String(250), primary_key=True)
 
    repository_path = Column('repository_path', Text)
 
    version = Column('version', Integer)
rhodecode/model/scm.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.model.scm
 
    ~~~~~~~~~~~~~~~~~~~
 

	
 
    Scm model for RhodeCode
 

	
 
    :created_on: Apr 9, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
from __future__ import with_statement
 
import os
 
import re
 
import time
 
import traceback
 
import logging
 
import cStringIO
 
import pkg_resources
 
from os.path import dirname as dn, join as jn
 

	
 
from sqlalchemy import func
 
from pylons.i18n.translation import _
 

	
 
import rhodecode
 
from rhodecode.lib.vcs import get_backend
 
from rhodecode.lib.vcs.exceptions import RepositoryError
 
from rhodecode.lib.vcs.utils.lazy import LazyProperty
 
from rhodecode.lib.vcs.nodes import FileNode
 
from rhodecode.lib.vcs.backends.base import EmptyChangeset
 

	
 
from rhodecode import BACKENDS
 
from rhodecode.lib import helpers as h
 
from rhodecode.lib.utils2 import safe_str, safe_unicode, get_server_url
 
from rhodecode.lib.utils2 import safe_str, safe_unicode, get_server_url,\
 
    _set_extras
 
from rhodecode.lib.auth import HasRepoPermissionAny, HasReposGroupPermissionAny
 
from rhodecode.lib.utils import get_filesystem_repos, make_ui, \
 
    action_logger, REMOVED_REPO_PAT
 
from rhodecode.model import BaseModel
 
from rhodecode.model.db import Repository, RhodeCodeUi, CacheInvalidation, \
 
    UserFollowing, UserLog, User, RepoGroup, PullRequest
 
from rhodecode.lib.hooks import log_push_action
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class UserTemp(object):
 
    def __init__(self, user_id):
 
        self.user_id = user_id
 

	
 
    def __repr__(self):
 
        return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
 

	
 

	
 
class RepoTemp(object):
 
    def __init__(self, repo_id):
 
        self.repo_id = repo_id
 

	
 
    def __repr__(self):
 
        return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
 

	
 

	
 
class CachedRepoList(object):
 
    """
 
    Cached repo list; uses an in-memory cache after initialization, which is
 
    super fast
 
    """
 

	
 
    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
 
        self.db_repo_list = db_repo_list
 
        self.repos_path = repos_path
 
        self.order_by = order_by
 
        self.reversed = (order_by or '').startswith('-')
 
        if not perm_set:
 
            perm_set = ['repository.read', 'repository.write',
 
                        'repository.admin']
 
        self.perm_set = perm_set
 

	
 
    def __len__(self):
 
        return len(self.db_repo_list)
 

	
 
    def __repr__(self):
 
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
 

	
 
    def __iter__(self):
 
        # pre-populated cache_map to save executing select statements
 
        # for each repo
 
        cache_map = CacheInvalidation.get_cache_map()
 

	
 
        for dbr in self.db_repo_list:
 
            scmr = dbr.scm_instance_cached(cache_map)
 
            # check permission at this level
 
            if not HasRepoPermissionAny(
 
                *self.perm_set
 
            )(dbr.repo_name, 'get repo check'):
 
                continue
 

	
 
            try:
 
                last_change = scmr.last_change
 
                tip = h.get_changeset_safe(scmr, 'tip')
 
            except Exception:
 
                log.error(
 
                    '%s this repository is present in the database but it '
 
                    'cannot be created as an scm instance, org_exc:%s'
 
                    % (dbr.repo_name, traceback.format_exc())
 
                )
 
                continue
 

	
 
            tmp_d = {}
 
            tmp_d['name'] = dbr.repo_name
 
            tmp_d['name_sort'] = tmp_d['name'].lower()
 
            tmp_d['raw_name'] = tmp_d['name'].lower()
 
            tmp_d['description'] = dbr.description
 
            tmp_d['description_sort'] = tmp_d['description'].lower()
 
            tmp_d['last_change'] = last_change
 
            tmp_d['last_change_sort'] = time.mktime(last_change.timetuple())
 
            tmp_d['tip'] = tip.raw_id
 
            tmp_d['tip_sort'] = tip.revision
 
            tmp_d['rev'] = tip.revision
 
            tmp_d['contact'] = dbr.user.full_contact
 
            tmp_d['contact_sort'] = tmp_d['contact']
 
            tmp_d['owner_sort'] = tmp_d['contact']
 
            tmp_d['repo_archives'] = list(scmr._get_archives())
 
            tmp_d['last_msg'] = tip.message
 
            tmp_d['author'] = tip.author
 
            tmp_d['dbrepo'] = dbr.get_dict()
 
            tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
 
            yield tmp_d
 

	
 

	
 
class SimpleCachedRepoList(CachedRepoList):
 
    """
 
    Lighter version of CachedRepoList without the scm initialisation
 
    """
 

	
 
    def __iter__(self):
 
        for dbr in self.db_repo_list:
 
            # check permission at this level
 
            if not HasRepoPermissionAny(
 
                *self.perm_set
 
            )(dbr.repo_name, 'get repo check'):
 
                continue
 

	
 
            tmp_d = {}
 
            tmp_d['name'] = dbr.repo_name
 
            tmp_d['name_sort'] = tmp_d['name'].lower()
 
            tmp_d['raw_name'] = tmp_d['name'].lower()
 
            tmp_d['description'] = dbr.description
 
            tmp_d['description_sort'] = tmp_d['description'].lower()
 
            tmp_d['dbrepo'] = dbr.get_dict()
 
            tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
 
            yield tmp_d
 

	
 

	
 
class GroupList(object):
 

	
 
    def __init__(self, db_repo_group_list, perm_set=None):
 
        """
 
        Creates an iterator from the given list of group objects, additionally
 
        checking permissions for them against the perm_set variable
 

	
 
        :param db_repo_group_list:
 
        :param perm_set: list of permissions to check
 
        """
 
        self.db_repo_group_list = db_repo_group_list
 
        if not perm_set:
 
            perm_set = ['group.read', 'group.write', 'group.admin']
 
        self.perm_set = perm_set
 

	
 
    def __len__(self):
 
        return len(self.db_repo_group_list)
 

	
 
    def __repr__(self):
 
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
 

	
 
    def __iter__(self):
 
        for dbgr in self.db_repo_group_list:
 
            # check permission at this level
 
            if not HasReposGroupPermissionAny(
 
                *self.perm_set
 
            )(dbgr.group_name, 'get group repo check'):
 
                continue
 

	
 
            yield dbgr
 

	
 

	
 
class ScmModel(BaseModel):
 
    """
 
    Generic Scm Model
 
    """
 

	
 
    def __get_repo(self, instance):
 
        cls = Repository
 
        if isinstance(instance, cls):
 
            return instance
 
        elif isinstance(instance, int) or safe_str(instance).isdigit():
 
            return cls.get(instance)
 
        elif isinstance(instance, basestring):
 
            return cls.get_by_repo_name(instance)
 
        elif instance:
 
            raise Exception('given object must be int, basestring or instance'
 
                            ' of %s got %s' % (type(cls), type(instance)))
 

	
 
    @LazyProperty
 
    def repos_path(self):
 
        """
 
        Gets the repositories' root path from the database
 
        """
 

	
 
        q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
 

	
 
        return q.ui_value
 

	
 
    def repo_scan(self, repos_path=None):
 
        """
 
        Lists repositories found in the given path. This path should not be a
 
        repository itself. Returns a dictionary of repository objects
 

	
 
        :param repos_path: path to directory containing repositories
 
        """
 

	
 
        if repos_path is None:
 
            repos_path = self.repos_path
 

	
 
        log.info('scanning for repositories in %s' % repos_path)
 

	
 
        baseui = make_ui('db')
 
        repos = {}
 

	
 
        for name, path in get_filesystem_repos(repos_path, recursive=True):
 
            # name needs to be decomposed and put back together using the /
 
            # since this is the internal storage separator for rhodecode
 
            name = Repository.normalize_repo_name(name)
 

	
 
            try:
 
                if name in repos:
 
                    raise RepositoryError('Duplicate repository name %s '
 
                                          'found in %s' % (name, path))
 
                else:
 

	
 
                    klass = get_backend(path[0])
 

	
 
                    if path[0] == 'hg' and path[0] in BACKENDS.keys():
 
                        repos[name] = klass(safe_str(path[1]), baseui=baseui)
 

	
 
                    if path[0] == 'git' and path[0] in BACKENDS.keys():
 
                        repos[name] = klass(path[1])
 
            except OSError:
 
                continue
 
        log.debug('found %s paths with repositories' % (len(repos)))
 
        return repos
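 
        # Illustrative usage (hypothetical path):
 
        #   ScmModel().repo_scan('/srv/repos')
 
        #   # -> {'web/projects/backend': <MercurialRepository ...>, ...}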
 

	
 
    def get_repos(self, all_repos=None, sort_key=None, simple=False):
 
        """
 
        Get all repos from the db and for each repo create its
 
        backend instance and fill that backend with information from the database
 

	
 
        :param all_repos: list of repository names as strings
 
            give specific repositories list, good for filtering
 

	
 
        :param sort_key: initial sorting of repos
 
        :param simple: use SimpleCachedRepoList - one without the SCM info
 
        """
 
        if all_repos is None:
 
            all_repos = self.sa.query(Repository)\
 
                        .filter(Repository.group_id == None)\
 
                        .order_by(func.lower(Repository.repo_name)).all()
 
        if simple:
 
            repo_iter = SimpleCachedRepoList(all_repos,
 
                                             repos_path=self.repos_path,
 
                                             order_by=sort_key)
 
        else:
 
            repo_iter = CachedRepoList(all_repos,
 
                                       repos_path=self.repos_path,
 
                                       order_by=sort_key)
 

	
 
        return repo_iter
 

	
 
    def get_repos_groups(self, all_groups=None):
 
        if all_groups is None:
 
            all_groups = RepoGroup.query()\
 
                .filter(RepoGroup.group_parent_id == None).all()
 
        return [x for x in GroupList(all_groups)]
 

	
 
    def mark_for_invalidation(self, repo_name):
 
        """
 
        Puts cache invalidation task into db for
 
        further global cache invalidation
 

	
 
        :param repo_name: the repo for which invalidation should take place
 
        """
 
        invalidated_keys = CacheInvalidation.set_invalidate(repo_name=repo_name)
 
        repo = Repository.get_by_repo_name(repo_name)
 
        if repo:
 
            repo.update_changeset_cache()
 
        return invalidated_keys
 

	
 
    def toggle_following_repo(self, follow_repo_id, user_id):
 

	
 
        f = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        if f is not None:
 
            try:
 
                self.sa.delete(f)
 
                action_logger(UserTemp(user_id),
 
                              'stopped_following_repo',
 
                              RepoTemp(follow_repo_id))
 
                return
 
            except:
 
                log.error(traceback.format_exc())
 
                raise
 

	
 
        try:
 
            f = UserFollowing()
 
            f.user_id = user_id
 
            f.follows_repo_id = follow_repo_id
 
            self.sa.add(f)
 

	
 
            action_logger(UserTemp(user_id),
 
                          'started_following_repo',
 
                          RepoTemp(follow_repo_id))
 
        except:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def toggle_following_user(self, follow_user_id, user_id):
 
        f = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.follows_user_id == follow_user_id)\
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        if f is not None:
 
            try:
 
                self.sa.delete(f)
 
                return
 
            except:
 
                log.error(traceback.format_exc())
 
                raise
 

	
 
        try:
 
            f = UserFollowing()
 
            f.user_id = user_id
 
            f.follows_user_id = follow_user_id
 
            self.sa.add(f)
 
        except:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def is_following_repo(self, repo_name, user_id, cache=False):
        r = self.sa.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == r)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def is_following_user(self, username, user_id, cache=False):
        u = User.get_by_username(username)

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user == u)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None
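Both toggle_* methods above follow the same idiom: fetch the UserFollowing row, delete it if present, otherwise create it; the is_following_* helpers answer the read-only question. A hypothetical sketch of driving them together (ids, the repository name and the enclosing ScmModel class are assumptions):

    # hypothetical usage sketch -- ids and the repo name are made up
    from rhodecode.model.scm import ScmModel

    model = ScmModel()
    model.toggle_following_repo(follow_repo_id=42, user_id=3)   # start following
    assert model.is_following_repo('some-repo', user_id=3)
    model.toggle_following_repo(follow_repo_id=42, user_id=3)   # stop following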
 

	
 
    def get_followers(self, repo):
        repo = self._get_repo(repo)

        return self.sa.query(UserFollowing)\
                .filter(UserFollowing.follows_repository == repo).count()

    def get_forks(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(Repository)\
                .filter(Repository.fork == repo).count()

    def get_pull_requests(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(PullRequest)\
                .filter(PullRequest.other_repo == repo).count()

    def mark_as_fork(self, repo, fork, user):
        repo = self.__get_repo(repo)
        fork = self.__get_repo(fork)
        if fork and repo.repo_id == fork.repo_id:
            raise Exception("Cannot set repository as fork of itself")
        repo.fork = fork
        self.sa.add(repo)
        return repo

    def _handle_push(self, repo, username, action, repo_name, revisions):
        """
        Triggers push action hooks

        :param repo: SCM repo
        :param username: username who pushes
        :param action: push/push_local/push_remote
        :param repo_name: name of repo
        :param revisions: list of revisions that we pushed
        """
        from rhodecode import CONFIG
        from rhodecode.lib.base import _get_ip_addr
        try:
            from pylons import request
            environ = request.environ
        except TypeError:
            # we might use this outside of request context, let's fake the
            # environ data
            from webob import Request
            environ = Request.blank('').environ

        # trigger push hook
        extras = {
            'ip': _get_ip_addr(environ),
            'username': username,
            'action': 'push_local',
            'repository': repo_name,
            'scm': repo.alias,
            'config': CONFIG['__file__'],
            'server_url': get_server_url(environ),
            'make_lock': None,
            'locked_by': [None, None]
        }
        _scm_repo = repo._repo
        repo.inject_ui(**extras)
        _set_extras(extras)
        if repo.alias == 'hg':
            log_push_action(_scm_repo.ui, _scm_repo, node=revisions[0])
        elif repo.alias == 'git':
            log_push_action(_scm_repo.ui, _scm_repo, _git_revs=revisions)
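In line with this changeset's point of passing the RhodeCode extra parameters through os.environ rather than through patched ui objects, _set_extras and _extract_extras act as a small serialize/deserialize pair around the extras dict built above. The sketch below only illustrates the idea; it assumes JSON serialization into a single environment variable, and the RC_SCM_DATA key name is an assumption, not taken from this diff:

    # illustrative sketch of the os.environ hand-off, not the actual helpers
    import os
    import json

    def _set_extras(extras):
        # serialize the push/pull context so hook processes can read it,
        # whether they run in-process or are invoked externally
        os.environ['RC_SCM_DATA'] = json.dumps(extras)

    def _extract_extras(env=None):
        # counterpart used from the hooks
        env = env if env is not None else os.environ
        return json.loads(env.get('RC_SCM_DATA', '{}'))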
 

	
 
    def _get_IMC_module(self, scm_type):
        """
        Returns the InMemoryChangeset class based on scm_type

        :param scm_type:
        """
        if scm_type == 'hg':
            from rhodecode.lib.vcs.backends.hg import \
                MercurialInMemoryChangeset as IMC
        elif scm_type == 'git':
            from rhodecode.lib.vcs.backends.git import \
                GitInMemoryChangeset as IMC
        return IMC

    def pull_changes(self, repo, username):
        dbrepo = self.__get_repo(repo)
        clone_uri = dbrepo.clone_uri
        if not clone_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance
        repo_name = dbrepo.repo_name
        try:
            if repo.alias == 'git':
                repo.fetch(clone_uri)
            else:
                repo.pull(clone_uri)
            self.mark_for_invalidation(repo_name)
        except:
            log.error(traceback.format_exc())
            raise

    def commit_change(self, repo, repo_name, cs, user, author, message,
                      content, f_path):
        """
        Commits changes

        :param repo: SCM instance

        """
        user = self._get_user(user)
        IMC = self._get_IMC_module(repo.alias)

        # converting here makes sure we have properly encoded values
        # in any other case this will throw exceptions and deny commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author need to be unicode
        # proper backend should then translate that into required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        m = IMC(repo)
        m.change(FileNode(path, content))
        tip = m.commit(message=message,
                       author=author,
                       parents=[cs], branch=cs.branch)

        self.mark_for_invalidation(repo_name)
        self._handle_push(repo,
                          username=user.username,
                          action='push_local',
                          repo_name=repo_name,
                          revisions=[tip.raw_id])
        return tip
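A hedged sketch of what a caller of commit_change looks like; all concrete values below are hypothetical and a running, configured RhodeCode instance is assumed:

    # hypothetical call -- values are made up
    from rhodecode.model.db import Repository
    from rhodecode.model.scm import ScmModel

    db_repo = Repository.get_by_repo_name('some-repo')
    scm_repo = db_repo.scm_instance
    tip = ScmModel().commit_change(
        repo=scm_repo, repo_name='some-repo',
        cs=scm_repo.get_changeset(),        # parent changeset to commit on top of
        user='admin', author=u'Admin <admin@example.com>',
        message=u'edit README', content='new content', f_path='README.rst')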
 

	
 
    def create_node(self, repo, repo_name, cs, user, author, message, content,
                      f_path):
        user = self._get_user(user)
        IMC = self._get_IMC_module(repo.alias)

        # converting here makes sure we have properly encoded values
        # in any other case this will throw exceptions and deny commit
        if isinstance(content, (basestring,)):
            content = safe_str(content)
        elif isinstance(content, (file, cStringIO.OutputType,)):
            content = content.read()
        else:
            raise Exception('Content is of unrecognized type %s' % (
                type(content)
            ))

        message = safe_unicode(message)
        author = safe_unicode(author)
        path = safe_str(f_path)
        m = IMC(repo)

        if isinstance(cs, EmptyChangeset):
            # EmptyChangeset means we're editing an empty repository
            parents = None
        else:
            parents = [cs]

        m.add(FileNode(path, content=content))
        tip = m.commit(message=message,
                       author=author,
                       parents=parents, branch=cs.branch)

        self.mark_for_invalidation(repo_name)
        self._handle_push(repo,
                          username=user.username,
                          action='push_local',
                          repo_name=repo_name,
                          revisions=[tip.raw_id])
        return tip

    def get_nodes(self, repo_name, revision, root_path='/', flat=True):
        """
        Recursive walk in root dir and return all paths found in that dir,
        based on the repository walk function

        :param repo_name: name of repository
        :param revision: revision for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list, if False returns a dict with description

        """
        _files = list()
        _dirs = list()
        try:
            _repo = self.__get_repo(repo_name)
            changeset = _repo.scm_instance.get_changeset(revision)
            root_path = root_path.lstrip('/')
            for topnode, dirs, files in changeset.walk(root_path):
                for f in files:
                    _files.append(f.path if flat else {"name": f.path,
                                                       "type": "file"})
                for d in dirs:
                    _dirs.append(d.path if flat else {"name": d.path,
                                                      "type": "dir"})
        except RepositoryError:
            log.debug(traceback.format_exc())
            raise

        return _dirs, _files
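To make the flat flag above concrete, the two return shapes look roughly like this (paths are invented; note the tuple is (dirs, files)):

    # flat=True  -> plain path lists
    #   (['docs'], ['README.rst', 'docs/index.rst'])
    # flat=False -> dicts that also carry the node type
    #   ([{'name': 'docs', 'type': 'dir'}],
    #    [{'name': 'README.rst', 'type': 'file'}])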
 

	
 
    def get_unread_journal(self):
        return self.sa.query(UserLog).count()

    def get_repo_landing_revs(self, repo=None):
        """
        Generates select options with tags, branches and bookmarks
        (bookmarks for hg only), grouped by type

        :param repo:
        :type repo:
        """

        hist_l = []
        choices = []
        repo = self.__get_repo(repo)
        hist_l.append(['tip', _('latest tip')])
        choices.append('tip')
        if not repo:
            return choices, hist_l

        repo = repo.scm_instance

        branches_group = ([(k, k) for k, v in
                           repo.branches.iteritems()], _("Branches"))
        hist_l.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        if repo.alias == 'hg':
            bookmarks_group = ([(k, k) for k, v in
                                repo.bookmarks.iteritems()], _("Bookmarks"))
            hist_l.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        tags_group = ([(k, k) for k, v in
                       repo.tags.iteritems()], _("Tags"))
        hist_l.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, hist_l
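For reference, the (choices, hist_l) pair built above is shaped for a grouped select widget; a made-up example for a small hg repository looks like this:

    # choices: flat list of every selectable value
    #   ['tip', 'default', 'stable', 'book1', 'v0.1']
    # hist_l: entries grouped for the select box, group label second
    #   [['tip', 'latest tip'],
    #    ([('default', 'default'), ('stable', 'stable')], 'Branches'),
    #    ([('book1', 'book1')], 'Bookmarks'),
    #    ([('v0.1', 'v0.1')], 'Tags')]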
 

	
 
    def install_git_hook(self, repo, force_create=False):
        """
        Creates a rhodecode hook inside a git repository

        :param repo: Instance of VCS repo
        :param force_create: Create even if same name hook exists
        """

        loc = jn(repo.path, 'hooks')
        if not repo.bare:
            loc = jn(repo.path, '.git', 'hooks')
        if not os.path.isdir(loc):
            os.makedirs(loc)

        tmpl_post = pkg_resources.resource_string(
            'rhodecode', jn('config', 'post_receive_tmpl.py')
        )
        tmpl_pre = pkg_resources.resource_string(
            'rhodecode', jn('config', 'pre_receive_tmpl.py')
        )

        for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
            _hook_file = jn(loc, '%s-receive' % h_type)
            _rhodecode_hook = False
            log.debug('Installing git hook in repo %s' % repo)
            if os.path.exists(_hook_file):
                # let's take a look at this hook, maybe it's rhodecode ?
                log.debug('hook exists, checking if it is from rhodecode')
                _HOOK_VER_PAT = re.compile(r'^RC_HOOK_VER')
                with open(_hook_file, 'rb') as f:
                    data = f.read()
                    matches = re.compile(r'(?:%s)\s*=\s*(.*)'
                                         % 'RC_HOOK_VER').search(data)
                    if matches:
                        try:
                            ver = matches.groups()[0]
                            log.debug('got %s it is rhodecode' % (ver))
                            _rhodecode_hook = True
                        except:
                            log.error(traceback.format_exc())
            else:
                # there is no hook in this dir, so we want to create one
                _rhodecode_hook = True

            if _rhodecode_hook or force_create:
                log.debug('writing %s hook file !' % h_type)
                with open(_hook_file, 'wb') as f:
                    tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
                    f.write(tmpl)
                os.chmod(_hook_file, 0755)
            else:
                log.debug('skipping writing hook file')
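The version check above expects the generated hook files to carry an RC_HOOK_VER = ... line. A small standalone sketch of that detection, runnable on its own with a made-up hook body:

    # standalone sketch of the RC_HOOK_VER detection used above
    import re

    sample_hook = "#!/usr/bin/env python\nRC_HOOK_VER = '1.5.0'\n"
    match = re.compile(r'(?:%s)\s*=\s*(.*)' % 'RC_HOOK_VER').search(sample_hook)
    if match:
        print('rhodecode hook, version %s' % match.groups()[0])
    else:
        print('foreign hook, leave it alone')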