Changeset - 8182ebed2922
[Not reviewed]
beta
Marcin Kuzminski - 2013-01-05 01:58:29
marcin@python-works.com
Added full last changeset info to lightweight dashboard
9 files changed with 79 insertions and 12 deletions:
0 comments (0 inline, 0 general)
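Note: the dashboard rows below are built from a per-repository changeset cache rather than from a full scm instance. A minimal sketch of that cached structure, assuming only the keys read by home.py below (revision, raw_id, author, message); the exact contents written by update_changeset_cache are not shown in this changeset:

# Hypothetical illustration of Repository.changeset_cache as consumed by the
# lightweight dashboard (keys inferred from the `tip` lambda in home.py):
cs_cache = {
    'raw_id': '8182ebed2922...',  # abbreviated changeset hash, placeholder only
    'revision': 123,
    'author': 'marcink <marcin@python-works.com>',
    'message': 'Added full last changeset info to lightweight dashboard',
}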
rhodecode/controllers/home.py
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.controllers.home
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    Home controller for RhodeCode
 

	
 
    :created_on: Feb 18, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import logging
 

	
 
from pylons import tmpl_context as c, request
 
from pylons.i18n.translation import _
 
from webob.exc import HTTPBadRequest
 

	
 
import rhodecode
 
from rhodecode.lib import helpers as h
 
from rhodecode.lib.ext_json import json
 
from rhodecode.lib.auth import LoginRequired
 
from rhodecode.lib.base import BaseController, render
 
from rhodecode.model.db import Repository
 
from sqlalchemy.sql.expression import func
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class HomeController(BaseController):
 

	
 
    @LoginRequired()
 
    def __before__(self):
 
        super(HomeController, self).__before__()
 

	
 
    def index(self):
 
        c.groups = self.scm_model.get_repos_groups()
 
        c.group = None
 

	
 
        if c.visual.lightweight_dashboard is False:
 
            c.repos_list = self.scm_model.get_repos()
 
        ## lightweight version of dashboard
 
        else:
 
            c.repos_list = Repository.query()\
 
                            .filter(Repository.group_id == None)\
 
                            .order_by(func.lower(Repository.repo_name))\
 
                            .all()
 
            repos_data = []
 
            total_records = len(c.repos_list)
 

	
 
            _tmpl_lookup = rhodecode.CONFIG['pylons.app_globals'].mako_lookup
 
            template = _tmpl_lookup.get_template('data_table/_dt_elements.html')
 

	
 
            quick_menu = lambda repo_name: (template.get_def("quick_menu")
 
                                            .render(repo_name, _=_, h=h, c=c))
 
            repo_lnk = lambda name, rtype, private, fork_of: (
 
                template.get_def("repo_name")
 
                .render(name, rtype, private, fork_of, short_name=False,
 
                        admin=False, _=_, h=h, c=c))
 
            last_change = lambda last_change:  (template.get_def("last_change")
 
                                           .render(last_change, _=_, h=h, c=c))
 
            rss_lnk = lambda repo_name: (template.get_def("rss")
 
                                           .render(repo_name, _=_, h=h, c=c))
 
            atom_lnk = lambda repo_name: (template.get_def("atom")
 
                                           .render(repo_name, _=_, h=h, c=c))
 
            tip = lambda repo_name, cs_cache: (template.get_def("revision")
 
                                    .render(repo_name,
 
                                            cs_cache.get('revision'),
 
                                            cs_cache.get('raw_id'),
 
                                            cs_cache.get('author'),
 
                                            cs_cache.get('message'), _=_, h=h,
 
                                            c=c))
 

	
 
            def desc(desc):
 
                if c.visual.stylify_metatags:
 
                    return h.urlify_text(h.desc_stylize(h.truncate(desc, 60)))
 
                else:
 
                    return h.urlify_text(h.truncate(desc, 60))
 

	
 
            for repo in c.repos_list:
 
                repos_data.append({
 
                    "menu": quick_menu(repo.repo_name),
 
                    "raw_name": repo.repo_name.lower(),
 
                    "name": repo_lnk(repo.repo_name, repo.repo_type,
 
                                     repo.private, repo.fork),
 
                    "last_change": last_change(repo.last_db_change),
 
                    "tip": tip(repo.repo_name, repo.changeset_cache),
 
                    "desc": desc(repo.description),
 
                    "owner": h.person(repo.user.username),
 
                    "rss": rss_lnk(repo.repo_name),
 
                    "atom": atom_lnk(repo.repo_name),
 
                })
 

	
 
            c.data = json.dumps({
 
                "totalRecords": total_records,
 
                "startIndex": 0,
 
                "sort": "name",
 
                "dir": "asc",
 
                "records": repos_data
 
            })
 

	
 
        return render('/index.html')
 

	
 
    def repo_switcher(self):
 
        if request.is_xhr:
 
            all_repos = Repository.query().order_by(Repository.repo_name).all()
 
            c.repos_list = self.scm_model.get_repos(all_repos,
 
                                                    sort_key='name_sort',
 
                                                    simple=True)
 
            return render('/repo_switcher_list.html')
 
        else:
 
            raise HTTPBadRequest()
 

	
 
    def branch_tag_switcher(self, repo_name):
 
        if request.is_xhr:
 
            c.rhodecode_db_repo = Repository.get_by_repo_name(c.repo_name)
 
            c.rhodecode_repo = c.rhodecode_db_repo.scm_instance
 
            return render('/switch_to_list.html')
 
        else:
 
            raise HTTPBadRequest()
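For reference, a minimal sketch of the payload that index() serializes into c.data for the dashboard's data table, assuming a single repository row; the placeholder strings stand in for the fragments rendered by the _dt_elements.html template defs above:

# Hypothetical example of the structure json.dumps()-ed into c.data above:
example_payload = {
    "totalRecords": 1,
    "startIndex": 0,
    "sort": "name",
    "dir": "asc",
    "records": [{
        "menu": "<!-- quick_menu HTML -->",
        "raw_name": "myrepo",
        "name": "<!-- repo_name link HTML -->",
        "last_change": "<!-- last_change HTML -->",
        "tip": "<!-- revision (tip) HTML -->",
        "desc": "my repository",
        "owner": "Marcin Kuzminski",
        "rss": "<!-- rss link HTML -->",
        "atom": "<!-- atom link HTML -->",
    }],
}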
rhodecode/lib/base.py
 
"""The base Controller API
 

	
 
Provides the BaseController class for subclassing.
 
"""
 
import logging
 
import time
 
import traceback
 

	
 
from paste.auth.basic import AuthBasicAuthenticator
 
from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden
 
from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION
 

	
 
from pylons import config, tmpl_context as c, request, session, url
 
from pylons.controllers import WSGIController
 
from pylons.controllers.util import redirect
 
from pylons.templating import render_mako as render
 

	
 
from rhodecode import __version__, BACKENDS
 

	
 
from rhodecode.lib.utils2 import str2bool, safe_unicode, AttributeDict,\
 
    safe_str, safe_int
 
from rhodecode.lib.auth import AuthUser, get_container_username, authfunc,\
 
    HasPermissionAnyMiddleware, CookieStoreWrapper
 
from rhodecode.lib.utils import get_repo_slug, invalidate_cache
 
from rhodecode.model import meta
 

	
 
from rhodecode.model.db import Repository, RhodeCodeUi, User, RhodeCodeSetting
 
from rhodecode.model.notification import NotificationModel
 
from rhodecode.model.scm import ScmModel
 
from rhodecode.model.meta import Session
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def _get_ip_addr(environ):
 
    proxy_key = 'HTTP_X_REAL_IP'
 
    proxy_key2 = 'HTTP_X_FORWARDED_FOR'
 
    def_key = 'REMOTE_ADDR'
 

	
 
    ip = environ.get(proxy_key2)
 
    if ip:
 
        return ip
 

	
 
    ip = environ.get(proxy_key)
 

	
 
    if ip:
 
        return ip
 

	
 
    ip = environ.get(def_key, '0.0.0.0')
 
    return ip
 

	
 

	
 
def _get_access_path(environ):
 
    path = environ.get('PATH_INFO')
 
    org_req = environ.get('pylons.original_request')
 
    if org_req:
 
        path = org_req.environ.get('PATH_INFO')
 
    return path
 

	
 

	
 
class BasicAuth(AuthBasicAuthenticator):
 

	
 
    def __init__(self, realm, authfunc, auth_http_code=None):
 
        self.realm = realm
 
        self.authfunc = authfunc
 
        self._rc_auth_http_code = auth_http_code
 

	
 
    def build_authentication(self):
 
        head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm)
 
        if self._rc_auth_http_code and self._rc_auth_http_code == '403':
 
            # return 403 if alternative http return code is specified in
 
            # RhodeCode config
 
            return HTTPForbidden(headers=head)
 
        return HTTPUnauthorized(headers=head)
 

	
 
    def authenticate(self, environ):
 
        authorization = AUTHORIZATION(environ)
 
        if not authorization:
 
            return self.build_authentication()
 
        (authmeth, auth) = authorization.split(' ', 1)
 
        if 'basic' != authmeth.lower():
 
            return self.build_authentication()
 
        auth = auth.strip().decode('base64')
 
        _parts = auth.split(':', 1)
 
        if len(_parts) == 2:
 
            username, password = _parts
 
            if self.authfunc(environ, username, password):
 
                return username
 
        return self.build_authentication()
 

	
 
    __call__ = authenticate
 

	
 

	
 
class BaseVCSController(object):
 

	
 
    def __init__(self, application, config):
 
        self.application = application
 
        self.config = config
 
        # base path of repo locations
 
        self.basepath = self.config['base_path']
 
        #authenticate this mercurial request using authfunc
 
        self.authenticate = BasicAuth('', authfunc,
 
                                      config.get('auth_ret_code'))
 
        self.ip_addr = '0.0.0.0'
 

	
 
    def _handle_request(self, environ, start_response):
 
        raise NotImplementedError()
 

	
 
    def _get_by_id(self, repo_name):
 
        """
 
        Gets the special pattern _<ID> from a clone URL and tries to replace it
 
        with a repository name, to support non-changeable _<ID> URLs
 

	
 
        :param repo_name:
 
        """
 
        try:
 
            data = repo_name.split('/')
 
            if len(data) >= 2:
 
                by_id = data[1].split('_')
 
                if len(by_id) == 2 and by_id[1].isdigit():
 
                    _repo_name = Repository.get(by_id[1]).repo_name
 
                    data[1] = _repo_name
 
        except:
 
            log.debug('Failed to extract repo_name from id %s' % (
 
                      traceback.format_exc()
 
                      )
 
            )
 

	
 
        return '/'.join(data)
 

	
 
    def _invalidate_cache(self, repo_name):
 
        """
 
        Sets the cache for this repository to be invalidated on next access
 

	
 
        :param repo_name: full repo name, also a cache key
 
        """
 
        invalidate_cache('get_repo_cached_%s' % repo_name)
 

	
 
    def _check_permission(self, action, user, repo_name, ip_addr=None):
 
        """
 
        Checks permissions using action (push/pull) user and repository
 
        name
 

	
 
        :param action: push or pull action
 
        :param user: user instance
 
        :param repo_name: repository name
 
        """
 
        #check IP
 
        authuser = AuthUser(user_id=user.user_id, ip_addr=ip_addr)
 
        if not authuser.ip_allowed:
 
            return False
 
        else:
 
            log.info('Access for IP:%s allowed' % (ip_addr))
 
        if action == 'push':
 
            if not HasPermissionAnyMiddleware('repository.write',
 
                                              'repository.admin')(user,
 
                                                                  repo_name):
 
                return False
 

	
 
        else:
 
            #any other action need at least read permission
 
            if not HasPermissionAnyMiddleware('repository.read',
 
                                              'repository.write',
 
                                              'repository.admin')(user,
 
                                                                  repo_name):
 
                return False
 

	
 
        return True
 

	
 
    def _get_ip_addr(self, environ):
 
        return _get_ip_addr(environ)
 

	
 
    def _check_ssl(self, environ, start_response):
 
        """
 
        Checks the SSL flag and returns False if SSL is required but not
 
        present, True otherwise
 
        """
 
        org_proto = environ['wsgi._org_proto']
 
        #check if we have SSL required  ! if not it's a bad request !
 
        require_ssl = str2bool(RhodeCodeUi.get_by_key('push_ssl').ui_value)
 
        if require_ssl and org_proto == 'http':
 
            log.debug('proto is %s and SSL is required BAD REQUEST !'
 
                      % org_proto)
 
            return False
 
        return True
 

	
 
    def _check_locking_state(self, environ, action, repo, user_id):
 
        """
 
        Checks locking on this repository (if locking is enabled and a lock is
 
        present) and returns a tuple of (make_lock, locked, locked_by).
 
        make_lock can have 3 states: None (do nothing), True (make a lock) and
 
        False (release a lock). This value is later propagated to the hooks,
 
        which do the actual locking. Think of it as a signal telling the hooks
 
        what to do.
 

	
 
        """
 
        locked = False  # defines that locked error should be thrown to user
 
        make_lock = None
 
        repo = Repository.get_by_repo_name(repo)
 
        user = User.get(user_id)
 

	
 
        # this is kind of hacky, but due to how mercurial handles client-server
 
        # communication the server sees operations on changesets, bookmarks,
 
        # phases and obsolescence markers as separate transactions; we don't
 
        # want to check locking on those
 
        obsolete_call = environ['QUERY_STRING'] in ['cmd=listkeys',]
 
        locked_by = repo.locked
 
        if repo and repo.enable_locking and not obsolete_call:
 
            if action == 'push':
 
                #check if it's already locked !, if it is compare users
 
                user_id, _date = repo.locked
 
                if user.user_id == user_id:
 
                    log.debug('Got push from user %s, now unlocking' % (user))
 
                    # unlock if we have push from user who locked
 
                    make_lock = False
 
                else:
 
                    # we're not the same user who locked, ban with 423 !
 
                    locked = True
 
            if action == 'pull':
 
                if repo.locked[0] and repo.locked[1]:
 
                    locked = True
 
                else:
 
                    log.debug('Setting lock on repo %s by %s' % (repo, user))
 
                    make_lock = True
 

	
 
        else:
 
            log.debug('Repository %s does not have locking enabled' % (repo))
 
        log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s'
 
                  % (make_lock, locked, locked_by))
 
        return make_lock, locked, locked_by
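A minimal sketch of how the returned (make_lock, locked, locked_by) tuple might be interpreted by a caller, following the docstring semantics above; handle_lock_state is a hypothetical helper, not RhodeCode code:

# Hypothetical consumer of _check_locking_state()'s return value.
def handle_lock_state(make_lock, locked, locked_by):
    if locked:
        # somebody else holds the lock -> reject the request
        # (RhodeCode answers such operations with HTTP 423 Locked)
        return 'reject'
    if make_lock is True:
        return 'set-lock'      # hooks will set the lock after this pull
    if make_lock is False:
        return 'release-lock'  # hooks will release the lock (push by lock owner)
    return 'noop'              # make_lock is None -> do nothing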
 

	
 
    def __call__(self, environ, start_response):
 
        start = time.time()
 
        try:
 
            return self._handle_request(environ, start_response)
 
        finally:
 
            log = logging.getLogger('rhodecode.' + self.__class__.__name__)
 
            log.debug('Request time: %.3fs' % (time.time() - start))
 
            meta.Session.remove()
 

	
 

	
 
class BaseController(WSGIController):
 

	
 
    def __before__(self):
 
        """
 
        __before__ is called before controller methods and after __call__
 
        """
 
        c.rhodecode_version = __version__
 
        c.rhodecode_instanceid = config.get('instance_id')
 
        c.rhodecode_name = config.get('rhodecode_title')
 
        c.use_gravatar = str2bool(config.get('use_gravatar'))
 
        c.ga_code = config.get('rhodecode_ga_code')
 
        # Visual options
 
        c.visual = AttributeDict({})
 
        rc_config = RhodeCodeSetting.get_app_settings()
 

	
 
        c.visual.show_public_icon = str2bool(rc_config.get('rhodecode_show_public_icon'))
 
        c.visual.show_private_icon = str2bool(rc_config.get('rhodecode_show_private_icon'))
 
        c.visual.stylify_metatags = str2bool(rc_config.get('rhodecode_stylify_metatags'))
 
        c.visual.lightweight_dashboard = str2bool(rc_config.get('rhodecode_lightweight_dashboard'))
 
        c.visual.lightweight_dashboard_items = safe_int(config.get('dashboard_items', 100))
 

	
 
        c.repo_name = get_repo_slug(request)
 
        c.backends = BACKENDS.keys()
 
        c.unread_notifications = NotificationModel()\
 
                        .get_unread_cnt_for_user(c.rhodecode_user.user_id)
 
        self.cut_off_limit = int(config.get('cut_off_limit'))
 

	
 
        self.sa = meta.Session
 
        self.scm_model = ScmModel(self.sa)
 

	
 
    def __call__(self, environ, start_response):
 
        """Invoke the Controller"""
 
        # WSGIController.__call__ dispatches to the Controller method
 
        # the request is routed to. This routing information is
 
        # available in environ['pylons.routes_dict']
 
        start = time.time()
 
        try:
 
            self.ip_addr = _get_ip_addr(environ)
 
            # make sure that we update permissions each time we call controller
 
            api_key = request.GET.get('api_key')
 
            cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
 
            user_id = cookie_store.get('user_id', None)
 
            username = get_container_username(environ, config)
 
            auth_user = AuthUser(user_id, api_key, username, self.ip_addr)
 
            request.user = auth_user
 
            self.rhodecode_user = c.rhodecode_user = auth_user
 
            if not self.rhodecode_user.is_authenticated and \
 
                       self.rhodecode_user.user_id is not None:
 
                self.rhodecode_user.set_authenticated(
 
                    cookie_store.get('is_authenticated')
 
                )
 
            log.info('IP: %s User: %s accessed %s' % (
 
               self.ip_addr, auth_user, safe_unicode(_get_access_path(environ)))
 
            )
 
            return WSGIController.__call__(self, environ, start_response)
 
        finally:
 
            log.info('IP: %s Request to %s time: %.3fs' % (
 
                _get_ip_addr(environ),
 
                safe_unicode(_get_access_path(environ)), time.time() - start)
 
            )
 
            meta.Session.remove()
 

	
 

	
 
class BaseRepoController(BaseController):
 
    """
 
    Base class for controllers responsible for loading all needed data for
 
    a repository. Loaded items are:
 

	
 
    c.rhodecode_repo: instance of scm repository
 
    c.rhodecode_db_repo: instance of db repository
 
    c.repository_followers: number of followers
 
    c.repository_forks: number of forks
 
    """
 

	
 
    def __before__(self):
 
        super(BaseRepoController, self).__before__()
 
        if c.repo_name:
 

	
 
            dbr = c.rhodecode_db_repo = Repository.get_by_repo_name(c.repo_name)
 
            c.rhodecode_repo = c.rhodecode_db_repo.scm_instance
 
            # update last change according to VCS data
 
            dbr.update_last_change(c.rhodecode_repo.last_change)
 
            dbr.update_changeset_cache(dbr.get_changeset())
 
            if c.rhodecode_repo is None:
 
                log.error('%s this repository is present in database but it '
 
                          'cannot be created as an scm instance', c.repo_name)
 

	
 
                redirect(url('home'))
 

	
 
            # some globals counter for menu
 
            c.repository_followers = self.scm_model.get_followers(dbr)
 
            c.repository_forks = self.scm_model.get_forks(dbr)
 
            c.repository_pull_requests = self.scm_model.get_pull_requests(dbr)
rhodecode/lib/dbmigrate/versions/010_version_1_5_2.py
 
import logging
 
import datetime
 

	
 
from sqlalchemy import *
 
from sqlalchemy.exc import DatabaseError
 
from sqlalchemy.orm import relation, backref, class_mapper, joinedload
 
from sqlalchemy.orm.session import Session
 
from sqlalchemy.ext.declarative import declarative_base
 

	
 
from rhodecode.lib.dbmigrate.migrate import *
 
from rhodecode.lib.dbmigrate.migrate.changeset import *
 

	
 
from rhodecode.model.meta import Base
 
from rhodecode.model import meta
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def upgrade(migrate_engine):
 
    """
 
    Upgrade operations go here.
 
    Don't create your own engine; bind migrate_engine to your metadata
 
    """
 
    #==========================================================================
 
    # USER IP MAP
 
    #==========================================================================
 
    from rhodecode.lib.dbmigrate.schema.db_1_5_0 import UserIpMap
 
    tbl = UserIpMap.__table__
 
    tbl.create()
 

	
 
    #==========================================================================
 
    # REPOSITORIES
 
    #==========================================================================
 
    from rhodecode.lib.dbmigrate.schema.db_1_5_0 import Repository
 
    tbl = Repository.__table__
 
    changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True)
 
    # create changeset_cache column
 
    changeset_cache.create(table=tbl)
 

	
 
    #fix cache data
 
    _Session = Session()
 
    ## after adding that column, populate the changeset cache for all repositories
 
    repositories = _Session.query(Repository).all()
 
    for entry in repositories:
 
        entry.update_changeset_cache()
 
    _Session.commit()
 

	
 

	
 
def downgrade(migrate_engine):
 
    meta = MetaData()
 
    meta.bind = migrate_engine
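The migration above adds the changeset_cache column and then calls entry.update_changeset_cache() for every repository. The model method itself is not part of this changeset view; a minimal sketch of what it presumably does, assuming the cache is stored as a JSON-serialized dict in the new LargeBinary column:

# Hypothetical sketch of Repository.update_changeset_cache() -- illustration
# only, the real model code is not shown in this changeset.
from rhodecode.lib.ext_json import json

def update_changeset_cache(repo, cs=None):
    cs = cs or repo.scm_instance.get_changeset()  # default to the tip
    cache = {
        'raw_id': cs.raw_id,
        'revision': cs.revision,
        'author': cs.author,
        'message': cs.message,
    }
    # assumed storage: JSON dump into the new changeset_cache column
    repo.changeset_cache = json.dumps(cache)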
rhodecode/lib/update_repoinfo.py
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.update_repoinfo
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    :created_on: Jul 14, 2012
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
from __future__ import with_statement
 

	
 
import os
 
import sys
 
import re
 
import shutil
 
import logging
 
import datetime
 
import string
 

	
 
from os.path import dirname as dn, join as jn
 
from rhodecode.model import init_model
 
from rhodecode.lib.utils2 import engine_from_config, safe_str
 
from rhodecode.model.db import RhodeCodeUi, Repository
 
from rhodecode.lib.vcs.backends.base import EmptyChangeset
 

	
 

	
 
#to get the rhodecode import
 
sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
 

	
 
from rhodecode.lib.utils import BasePasterCommand, Command, add_cache
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class UpdateCommand(BasePasterCommand):
 

	
 
    max_args = 1
 
    min_args = 1
 

	
 
    usage = "CONFIG_FILE"
 
    summary = "Cleanup deleted repos"
 
    group_name = "RhodeCode"
 
    takes_config_file = -1
 
    parser = Command.standard_parser(verbose=True)
 

	
 
    def command(self):
 
        logging.config.fileConfig(self.path_to_ini_file)
 
        from pylons import config
 

	
 
        # get repos to update
 
        add_cache(config)
 
        engine = engine_from_config(config, 'sqlalchemy.db1.')
 
        init_model(engine)
 

	
 
        repo_update_list = map(string.strip,
 
                               self.options.repo_update_list.split(',')) \
 
                               if self.options.repo_update_list else None
 

	
 
        if repo_update_list:
 
            repo_list = Repository.query().filter(Repository.repo_name.in_(repo_update_list))
 
        else:
 
            repo_list = Repository.getAll()
 
        for repo in repo_list:
 
            last_change = (repo.scm_instance.last_change if repo.scm_instance
 
                           else datetime.datetime.utcfromtimestamp(0))
 
            repo.update_last_change(last_change)
 
            last_cs = (repo.scm_instance.get_changeset() if repo.scm_instance
 
                           else EmptyChangeset())
 
            repo.update_changeset_cache(last_cs)
 

	
 
    def update_parser(self):
 
        self.parser.add_option('--update-only',
 
                          action='store',
 
                          dest='repo_update_list',
 
                          help="Specifies a comma separated list of repositores "
 
                                "to update last commit info for. OPTIONAL",
 
                          )
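A hypothetical invocation of the command above; the paster entry-point name is not shown in this changeset, so update-repoinfo is only an assumption:

# Assumed command name -- only the --update-only option and the CONFIG_FILE
# argument are confirmed by the code above:
#
#   paster update-repoinfo production.ini --update-only=repo1,group/repo2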
rhodecode/lib/utils.py
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.utils
 
    ~~~~~~~~~~~~~~~~~~~
 

	
 
    Utilities library for RhodeCode
 

	
 
    :created_on: Apr 18, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import os
 
import re
 
import logging
 
import datetime
 
import traceback
 
import paste
 
import beaker
 
import tarfile
 
import shutil
 
import decorator
 
import warnings
 
from os.path import abspath
 
from os.path import dirname as dn, join as jn
 

	
 
from paste.script.command import Command, BadCommand
 

	
 
from mercurial import ui, config
 

	
 
from webhelpers.text import collapse, remove_formatting, strip_tags
 

	
 
from rhodecode.lib.vcs import get_backend
 
from rhodecode.lib.vcs.backends.base import BaseChangeset
 
from rhodecode.lib.vcs.utils.lazy import LazyProperty
 
from rhodecode.lib.vcs.utils.helpers import get_scm
 
from rhodecode.lib.vcs.exceptions import VCSError
 

	
 
from rhodecode.lib.caching_query import FromCache
 

	
 
from rhodecode.model import meta
 
from rhodecode.model.db import Repository, User, RhodeCodeUi, \
 
    UserLog, RepoGroup, RhodeCodeSetting, CacheInvalidation
 
from rhodecode.model.meta import Session
 
from rhodecode.model.repos_group import ReposGroupModel
 
from rhodecode.lib.utils2 import safe_str, safe_unicode
 
from rhodecode.lib.vcs.utils.fakemod import create_module
 

	
 
log = logging.getLogger(__name__)
 

	
 
REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
 

	
 

	
 
def recursive_replace(str_, replace=' '):
 
    """
 
    Recursively replaces repeated occurrences of a given character with a single one
 

	
 
    :param str_: given string
 
    :param replace: char to find and replace multiple instances
 

	
 
    Examples::
 
    >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
 
    'Mighty-Mighty-Bo-sstones'
 
    """
 

	
 
    if str_.find(replace * 2) == -1:
 
        return str_
 
    else:
 
        str_ = str_.replace(replace * 2, replace)
 
        return recursive_replace(str_, replace)
 

	
 

	
 
def repo_name_slug(value):
 
    """
 
    Returns a slug of the repository name.
 
    This function is called on each creation/modification
 
    of a repository to prevent bad names.
 
    """
 

	
 
    slug = remove_formatting(value)
 
    slug = strip_tags(slug)
 

	
 
    for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
 
        slug = slug.replace(c, '-')
 
    slug = recursive_replace(slug, '-')
 
    slug = collapse(slug, '-')
 
    return slug
 

	
 

	
 
def get_repo_slug(request):
 
    _repo = request.environ['pylons.routes_dict'].get('repo_name')
 
    if _repo:
 
        _repo = _repo.rstrip('/')
 
    return _repo
 

	
 

	
 
def get_repos_group_slug(request):
 
    _group = request.environ['pylons.routes_dict'].get('group_name')
 
    if _group:
 
        _group = _group.rstrip('/')
 
    return _group
 

	
 

	
 
def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
 
    """
 
    Action logger for various actions made by users
 

	
 
    :param user: user that made this action, can be a unique username string or
 
        object containing user_id attribute
 
    :param action: action to log, should be one of the predefined unique actions for
 
        easy translations
 
    :param repo: string name of repository or object containing repo_id,
 
        that action was made on
 
    :param ipaddr: optional ip address from what the action was made
 
    :param sa: optional sqlalchemy session
 

	
 
    """
 

	
 
    if not sa:
 
        sa = meta.Session()
 

	
 
    try:
 
        if hasattr(user, 'user_id'):
 
            user_obj = User.get(user.user_id)
 
        elif isinstance(user, basestring):
 
            user_obj = User.get_by_username(user)
 
        else:
 
            raise Exception('You have to provide a user object or a username')
 

	
 
        if hasattr(repo, 'repo_id'):
 
            repo_obj = Repository.get(repo.repo_id)
 
            repo_name = repo_obj.repo_name
 
        elif  isinstance(repo, basestring):
 
            repo_name = repo.lstrip('/')
 
            repo_obj = Repository.get_by_repo_name(repo_name)
 
        else:
 
            repo_obj = None
 
            repo_name = ''
 

	
 
        user_log = UserLog()
 
        user_log.user_id = user_obj.user_id
 
        user_log.username = user_obj.username
 
        user_log.action = safe_unicode(action)
 

	
 
        user_log.repository = repo_obj
 
        user_log.repository_name = repo_name
 

	
 
        user_log.action_date = datetime.datetime.now()
 
        user_log.user_ip = ipaddr
 
        sa.add(user_log)
 

	
 
        log.info('Logging action %s on %s by %s' %
 
                 (action, safe_unicode(repo), user_obj))
 
        if commit:
 
            sa.commit()
 
    except:
 
        log.error(traceback.format_exc())
 
        raise
 

	
 

	
 
def get_repos(path, recursive=False):
 
    """
 
    Scans the given path for repos and returns a (name, (type, path)) tuple
 

	
 
    :param path: path to scan for repositories
 
    :param recursive: recursive search and return names with subdirs in front
 
    """
 

	
 
    # remove ending slash for better results
 
    path = path.rstrip(os.sep)
 

	
 
    def _get_repos(p):
 
        if not os.access(p, os.W_OK):
 
            return
 
        for dirpath in os.listdir(p):
 
            if os.path.isfile(os.path.join(p, dirpath)):
 
                continue
 
            cur_path = os.path.join(p, dirpath)
 
            try:
 
                scm_info = get_scm(cur_path)
 
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
 
            except VCSError:
 
                if not recursive:
 
                    continue
 
                #check if this dir contains other repos for recursive scan
 
                rec_path = os.path.join(p, dirpath)
 
                if os.path.isdir(rec_path):
 
                    for inner_scm in _get_repos(rec_path):
 
                        yield inner_scm
 

	
 
    return _get_repos(path)
 

	
 

	
 
def is_valid_repo(repo_name, base_path, scm=None):
 
    """
 
    Returns True if the given path is a valid repository, False otherwise.
 
    If the scm param is given, also checks that the repository's scm matches
 
    the one expected from the scm parameter
 

	
 
    :param repo_name:
 
    :param base_path:
 
    :param scm:
 

	
 
    :return True: if given path is a valid repository
 
    """
 
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
 

	
 
    try:
 
        scm_ = get_scm(full_path)
 
        if scm:
 
            return scm_[0] == scm
 
        return True
 
    except VCSError:
 
        return False
 

	
 

	
 
def is_valid_repos_group(repos_group_name, base_path):
 
    """
 
    Returns True if given path is a repos group False otherwise
 

	
 
    :param repo_name:
 
    :param base_path:
 
    """
 
    full_path = os.path.join(safe_str(base_path), safe_str(repos_group_name))
 

	
 
    # check if it's not a repo
 
    if is_valid_repo(repos_group_name, base_path):
 
        return False
 

	
 
    try:
 
        # we need to check bare git repos at higher level
 
        # since we might match branches/hooks/info/objects or possible
 
        # other things inside bare git repo
 
        get_scm(os.path.dirname(full_path))
 
        return False
 
    except VCSError:
 
        pass
 

	
 
    # check if it's a valid path
 
    if os.path.isdir(full_path):
 
        return True
 

	
 
    return False
 

	
 

	
 
def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
 
    while True:
 
        ok = raw_input(prompt)
 
        if ok in ('y', 'ye', 'yes'):
 
            return True
 
        if ok in ('n', 'no', 'nop', 'nope'):
 
            return False
 
        retries = retries - 1
 
        if retries < 0:
 
            raise IOError
 
        print complaint
 

	
 
#propagated from mercurial documentation
 
ui_sections = ['alias', 'auth',
 
                'decode/encode', 'defaults',
 
                'diff', 'email',
 
                'extensions', 'format',
 
                'merge-patterns', 'merge-tools',
 
                'hooks', 'http_proxy',
 
                'smtp', 'patch',
 
                'paths', 'profiling',
 
                'server', 'trusted',
 
                'ui', 'web', ]
 

	
 

	
 
def make_ui(read_from='file', path=None, checkpaths=True, clear_session=True):
 
    """
 
    A function that will read python rc files or the database
 
    and make a mercurial ui object from the read options
 

	
 
    :param path: path to mercurial config file
 
    :param checkpaths: check the path
 
    :param read_from: read from 'file' or 'db'
 
    """
 

	
 
    baseui = ui.ui()
 

	
 
    # clean the baseui object
 
    baseui._ocfg = config.config()
 
    baseui._ucfg = config.config()
 
    baseui._tcfg = config.config()
 

	
 
    if read_from == 'file':
 
        if not os.path.isfile(path):
 
            log.debug('hgrc file is not present at %s, skipping...' % path)
 
            return False
 
        log.debug('reading hgrc from %s' % path)
 
        cfg = config.config()
 
        cfg.read(path)
 
        for section in ui_sections:
 
            for k, v in cfg.items(section):
 
                log.debug('settings ui from file: [%s] %s=%s' % (section, k, v))
 
                baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))
 

	
 
    elif read_from == 'db':
 
        sa = meta.Session()
 
        ret = sa.query(RhodeCodeUi)\
 
            .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
 
            .all()
 

	
 
        hg_ui = ret
 
        for ui_ in hg_ui:
 
            if ui_.ui_active:
 
                log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
 
                          ui_.ui_key, ui_.ui_value)
 
                baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
 
                                 safe_str(ui_.ui_value))
 
            if ui_.ui_key == 'push_ssl':
 
                # force set push_ssl requirement to False, rhodecode
 
                # handles that
 
                baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
 
                                 False)
 
        if clear_session:
 
            meta.Session.remove()
 
    return baseui
 

	
 

	
 
def set_rhodecode_config(config):
 
    """
 
    Updates pylons config with new settings from database
 

	
 
    :param config:
 
    """
 
    hgsettings = RhodeCodeSetting.get_app_settings()
 

	
 
    for k, v in hgsettings.items():
 
        config[k] = v
 

	
 

	
 
def invalidate_cache(cache_key, *args):
 
    """
 
    Puts cache invalidation task into db for
 
    further global cache invalidation
 
    """
 

	
 
    from rhodecode.model.scm import ScmModel
 

	
 
    if cache_key.startswith('get_repo_cached_'):
 
        name = cache_key.split('get_repo_cached_')[-1]
 
        ScmModel().mark_for_invalidation(name)
 

	
 

	
 
def map_groups(path):
 
    """
 
    Given a full path to a repository, create all nested groups that this
 
    repo is inside. This function creates parent-child relationships between
 
    groups and creates default perms for all new groups.
 

	
 
    :param paths: full path to repository
 
    """
 
    sa = meta.Session()
 
    groups = path.split(Repository.url_sep())
 
    parent = None
 
    group = None
 

	
 
    # last element is repo in nested groups structure
 
    groups = groups[:-1]
 
    rgm = ReposGroupModel(sa)
 
    for lvl, group_name in enumerate(groups):
 
        group_name = '/'.join(groups[:lvl] + [group_name])
 
        group = RepoGroup.get_by_group_name(group_name)
 
        desc = '%s group' % group_name
 

	
 
        # skip folders that are now removed repos
 
        if REMOVED_REPO_PAT.match(group_name):
 
            break
 

	
 
        if group is None:
 
            log.debug('creating group level: %s group_name: %s' % (lvl,
 
                                                                   group_name))
 
            group = RepoGroup(group_name, parent)
 
            group.group_description = desc
 
            sa.add(group)
 
            rgm._create_default_perms(group)
 
            sa.flush()
 
        parent = group
 
    return group
 

	
 

	
 
def repo2db_mapper(initial_repo_list, remove_obsolete=False,
 
                   install_git_hook=False):
 
    """
 
    Maps all repos given in initial_repo_list; non-existing repositories
 
    are created. If remove_obsolete is True, it also checks for db entries
 
    that are not in initial_repo_list and removes them.
 

	
 
    :param initial_repo_list: list of repositories found by scanning methods
 
    :param remove_obsolete: check for obsolete entries in database
 
    :param install_git_hook: if this is True, also check and install githook
 
        for a repo if missing
 
    """
 
    from rhodecode.model.repo import RepoModel
 
    from rhodecode.model.scm import ScmModel
 
    sa = meta.Session()
 
    rm = RepoModel()
 
    user = sa.query(User).filter(User.admin == True).first()
 
    if user is None:
 
        raise Exception('Missing administrative account!')
 
    added = []
 

	
 
#    # clear cache keys
 
#    log.debug("Clearing cache keys now...")
 
#    CacheInvalidation.clear_cache()
 
#    sa.commit()
 

	
 
    ##creation defaults
 
    defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
 
    enable_statistics = defs.get('repo_enable_statistics')
 
    enable_locking = defs.get('repo_enable_locking')
 
    enable_downloads = defs.get('repo_enable_downloads')
 
    private = defs.get('repo_private')
 

	
 
    for name, repo in initial_repo_list.items():
 
        group = map_groups(name)
 
        db_repo = rm.get_by_repo_name(name)
 
        # found repo that is on filesystem not in RhodeCode database
 
        if not db_repo:
 
            log.info('repository %s not found, creating now' % name)
 
            added.append(name)
 
            desc = (repo.description
 
                    if repo.description != 'unknown'
 
                    else '%s repository' % name)
 

	
 
            new_repo = rm.create_repo(
 
                repo_name=name,
 
                repo_type=repo.alias,
 
                description=desc,
 
                repos_group=getattr(group, 'group_id', None),
 
                owner=user,
 
                just_db=True,
 
                enable_locking=enable_locking,
 
                enable_downloads=enable_downloads,
 
                enable_statistics=enable_statistics,
 
                private=private
 
            )
 
            # we added that repo just now, and make sure it has githook
 
            # installed
 
            if new_repo.repo_type == 'git':
 
                ScmModel().install_git_hook(new_repo.scm_instance)
 
            new_repo.update_changeset_cache()
 
        elif install_git_hook:
 
            if db_repo.repo_type == 'git':
 
                ScmModel().install_git_hook(db_repo.scm_instance)
 
        # during starting install all cache keys for all repositories in the
 
        # system, this will register all repos and multiple instances
 
        key, _prefix, _org_key = CacheInvalidation._get_key(name)
 
        CacheInvalidation.invalidate(name)
 
        log.debug("Creating a cache key for %s, instance_id %s"
 
                  % (name, _prefix or 'unknown'))
 

	
 
    sa.commit()
 
    removed = []
 
    if remove_obsolete:
 
        # remove from database those repositories that are not in the filesystem
 
        for repo in sa.query(Repository).all():
 
            if repo.repo_name not in initial_repo_list.keys():
 
                log.debug("Removing non-existing repository found in db `%s`" %
 
                          repo.repo_name)
 
                try:
 
                    sa.delete(repo)
 
                    sa.commit()
 
                    removed.append(repo.repo_name)
 
                except:
 
                    #don't hold further removals on error
 
                    log.error(traceback.format_exc())
 
                    sa.rollback()
 

	
 
    return added, removed
 

	
 

	
 
# set cache regions for beaker so celery can utilise it
 
def add_cache(settings):
 
    cache_settings = {'regions': None}
 
    for key in settings.keys():
 
        for prefix in ['beaker.cache.', 'cache.']:
 
            if key.startswith(prefix):
 
                name = key.split(prefix)[1].strip()
 
                cache_settings[name] = settings[key].strip()
 
    if cache_settings['regions']:
 
        for region in cache_settings['regions'].split(','):
 
            region = region.strip()
 
            region_settings = {}
 
            for key, value in cache_settings.items():
 
                if key.startswith(region):
 
                    region_settings[key.split('.')[1]] = value
 
            region_settings['expire'] = int(region_settings.get('expire',
 
                                                                60))
 
            region_settings.setdefault('lock_dir',
 
                                       cache_settings.get('lock_dir'))
 
            region_settings.setdefault('data_dir',
 
                                       cache_settings.get('data_dir'))
 

	
 
            if 'type' not in region_settings:
 
                region_settings['type'] = cache_settings.get('type',
 
                                                             'memory')
 
            beaker.cache.cache_regions[region] = region_settings
 

	
 

	
 
def load_rcextensions(root_path):
 
    import rhodecode
 
    from rhodecode.config import conf
 

	
 
    path = os.path.join(root_path, 'rcextensions', '__init__.py')
 
    if os.path.isfile(path):
 
        rcext = create_module('rc', path)
 
        EXT = rhodecode.EXTENSIONS = rcext
 
        log.debug('Found rcextensions now loading %s...' % rcext)
 

	
 
        # Additional mappings that are not present in the pygments lexers
 
        conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
 

	
 
        #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
 

	
 
        if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
 
            log.debug('settings custom INDEX_EXTENSIONS')
 
            conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])
 

	
 
        #ADDITIONAL MAPPINGS
 
        log.debug('adding extra into INDEX_EXTENSIONS')
 
        conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))
 

	
 

	
 
#==============================================================================
 
# TEST FUNCTIONS AND CREATORS
 
#==============================================================================
 
def create_test_index(repo_location, config, full_index):
 
    """
 
    Makes default test index
 

	
 
    :param config: test config
 
    :param full_index:
 
    """
 

	
 
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
 
    from rhodecode.lib.pidlock import DaemonLock, LockHeld
 

	
 
    repo_location = repo_location
 

	
 
    index_location = os.path.join(config['app_conf']['index_dir'])
 
    if not os.path.exists(index_location):
 
        os.makedirs(index_location)
 

	
 
    try:
 
        l = DaemonLock(file_=jn(dn(index_location), 'make_index.lock'))
 
        WhooshIndexingDaemon(index_location=index_location,
 
                             repo_location=repo_location)\
 
            .run(full_index=full_index)
 
        l.release()
 
    except LockHeld:
 
        pass
 

	
 

	
 
def create_test_env(repos_test_path, config):
 
    """
 
    Makes a fresh database and
 
    install test repository into tmp dir
 
    """
 
    from rhodecode.lib.db_manage import DbManage
 
    from rhodecode.tests import HG_REPO, GIT_REPO, TESTS_TMP_PATH
 

	
 
    # PART ONE create db
 
    dbconf = config['sqlalchemy.db1.url']
 
    log.debug('making test db %s' % dbconf)
 

	
 
    # create test dir if it doesn't exist
 
    if not os.path.isdir(repos_test_path):
 
        log.debug('Creating testdir %s' % repos_test_path)
 
        os.makedirs(repos_test_path)
 

	
 
    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
 
                        tests=True)
 
    dbmanage.create_tables(override=True)
 
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
 
    dbmanage.create_default_user()
 
    dbmanage.admin_prompt()
 
    dbmanage.create_permissions()
 
    dbmanage.populate_default_permissions()
 
    Session().commit()
 
    # PART TWO make test repo
 
    log.debug('making test vcs repositories')
 

	
 
    idx_path = config['app_conf']['index_dir']
 
    data_path = config['app_conf']['cache_dir']
 

	
 
    #clean index and data
 
    if idx_path and os.path.exists(idx_path):
 
        log.debug('remove %s' % idx_path)
 
        shutil.rmtree(idx_path)
 

	
 
    if data_path and os.path.exists(data_path):
 
        log.debug('remove %s' % data_path)
 
        shutil.rmtree(data_path)
 

	
 
    #CREATE DEFAULT TEST REPOS
 
    cur_dir = dn(dn(abspath(__file__)))
 
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
 
    tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
 
    tar.close()
 

	
 
    cur_dir = dn(dn(abspath(__file__)))
 
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_git.tar.gz"))
 
    tar.extractall(jn(TESTS_TMP_PATH, GIT_REPO))
 
    tar.close()
 

	
 
    #LOAD VCS test stuff
 
    from rhodecode.tests.vcs import setup_package
 
    setup_package()
 

	
 

	
 
#==============================================================================
 
# PASTER COMMANDS
 
#==============================================================================
 
class BasePasterCommand(Command):
 
    """
 
    Abstract Base Class for paster commands.
 

	
 
    The celery commands are somewhat aggressive about loading
 
    celery.conf, and since our module sets the `CELERY_LOADER`
 
    environment variable to our loader, we have to bootstrap a bit and
 
    make sure we've had a chance to load the pylons config off of the
 
    command line, otherwise everything fails.
 
    """
 
    min_args = 1
 
    min_args_error = "Please provide a paster config file as an argument."
 
    takes_config_file = 1
 
    requires_config_file = True
 

	
 
    def notify_msg(self, msg, log=False):
 
        """Make a notification to user, additionally if logger is passed
 
        it logs this action using given logger
 

	
 
        :param msg: message that will be printed to user
 
        :param log: logging instance, to use to additionally log this message
 

	
 
        """
 
        if log and isinstance(log, logging):
 
            log(msg)
 

	
 
    def run(self, args):
 
        """
 
        Overrides Command.run
 

	
 
        Checks for a config file argument and loads it.
 
        """
 
        if len(args) < self.min_args:
 
            raise BadCommand(
 
                self.min_args_error % {'min_args': self.min_args,
 
                                       'actual_args': len(args)})
 

	
 
        # Decrement because we're going to lob off the first argument.
 
        # @@ This is hacky
 
        self.min_args -= 1
 
        self.bootstrap_config(args[0])
 
        self.update_parser()
 
        return super(BasePasterCommand, self).run(args[1:])
 

	
 
    def update_parser(self):
 
        """
 
        Abstract method.  Allows for the class's parser to be updated
 
        before the superclass's `run` method is called.  Necessary to
 
        allow options/arguments to be passed through to the underlying
 
        celery command.
 
        """
 
        raise NotImplementedError("Abstract Method.")
 

	
 
    def bootstrap_config(self, conf):
 
        """
 
        Loads the pylons configuration.
 
        """
 
        from pylons import config as pylonsconfig
 

	
 
        self.path_to_ini_file = os.path.realpath(conf)
 
        conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
 
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)
 

	
 

	
 
def check_git_version():
 
    """
 
    Checks what version of git is installed in system, and issues a warning
 
    if it's too old for RhodeCode to properly work.
 
    """
 
    import subprocess
 
    from distutils.version import StrictVersion
 
    from rhodecode import BACKENDS
 

	
 
    p = subprocess.Popen('git --version', shell=True,
 
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
 
    stdout, stderr = p.communicate()
 
    ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
 
    if len(ver.split('.')) > 3:
 
        #StrictVersion needs to be only 3 element type
 
        ver = '.'.join(ver.split('.')[:3])
 
    try:
 
        _ver = StrictVersion(ver)
 
    except:
 
        _ver = StrictVersion('0.0.0')
 
        stderr = traceback.format_exc()
 

	
 
    req_ver = '1.7.4'
 
    to_old_git = False
 
    if  _ver < StrictVersion(req_ver):
 
        to_old_git = True
 

	
 
    if 'git' in BACKENDS:
 
        log.debug('GIT version detected: %s' % stdout)
 
        if stderr:
 
            log.warning('Unable to detect git version, original error was: %r' % stderr)
 
        elif to_old_git:
 
            log.warning('RhodeCode detected git version %s, which is too old '
 
                        'for the system to function properly. Make sure '
 
                        'its version is at least %s' % (ver, req_ver))
 
    return _ver
 

	
 

	
 
@decorator.decorator
 
def jsonify(func, *args, **kwargs):
 
    """Action decorator that formats output for JSON
 

	
 
    Given a function that will return content, this decorator will turn
 
    the result into JSON, with a content-type of 'application/json' and
 
    output it.
 

	
 
    """
 
    from pylons.decorators.util import get_pylons
 
    from rhodecode.lib.ext_json import json
 
    pylons = get_pylons(args)
 
    pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
 
    data = func(*args, **kwargs)
 
    if isinstance(data, (list, tuple)):
 
        msg = "JSON responses with Array envelopes are susceptible to " \
 
              "cross-site data leak attacks, see " \
 
              "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
 
        warnings.warn(msg, Warning, 2)
 
        log.warning(msg)
 
    log.debug("Returning JSON wrapped action output")
 
    return json.dumps(data, encoding='utf-8')
 
\ No newline at end of file
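As a usage note, a minimal sketch of a controller action using the jsonify decorator defined at the end of utils.py above; ExampleController and repo_count are hypothetical names:

# Hypothetical controller action -- illustration of the jsonify decorator.
from rhodecode.lib.utils import jsonify
from rhodecode.lib.base import BaseController
from rhodecode.model.db import Repository

class ExampleController(BaseController):

    @jsonify
    def repo_count(self):
        # returning a dict (not a list) avoids the Array-envelope warning
        return {'repositories': Repository.query().count()}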
rhodecode/lib/vcs/backends/base.py
 
# -*- coding: utf-8 -*-
 
"""
 
    vcs.backends.base
 
    ~~~~~~~~~~~~~~~~~
 

	
 
    Base for all available scm backends
 

	
 
    :created_on: Apr 8, 2010
 
    :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
 
"""
 

	
 

	
 
from itertools import chain
 
from rhodecode.lib.vcs.utils import author_name, author_email
 
from rhodecode.lib.vcs.utils.lazy import LazyProperty
 
from rhodecode.lib.vcs.utils.helpers import get_dict_for_attrs
 
from rhodecode.lib.vcs.conf import settings
 

	
 
from rhodecode.lib.vcs.exceptions import ChangesetError, EmptyRepositoryError, \
 
    NodeAlreadyAddedError, NodeAlreadyChangedError, NodeAlreadyExistsError, \
 
    NodeAlreadyRemovedError, NodeDoesNotExistError, NodeNotChangedError, \
 
    RepositoryError
 

	
 

	
 
class BaseRepository(object):
 
    """
 
    Base Repository for final backends
 

	
 
    **Attributes**
 

	
 
        ``DEFAULT_BRANCH_NAME``
 
            name of default branch (i.e. "trunk" for svn, "master" for git, etc.)
 

	
 
        ``scm``
 
            alias of scm, i.e. *git* or *hg*
 

	
 
        ``repo``
 
            object from external api
 

	
 
        ``revisions``
 
            list of all available revisions' ids, in ascending order
 

	
 
        ``changesets``
 
            storage dict caching returned changesets
 

	
 
        ``path``
 
            absolute path to the repository
 

	
 
        ``branches``
 
            branches as list of changesets
 

	
 
        ``tags``
 
            tags as list of changesets
 
    """
 
    scm = None
 
    DEFAULT_BRANCH_NAME = None
 
    EMPTY_CHANGESET = '0' * 40
 

	
 
    def __init__(self, repo_path, create=False, **kwargs):
 
        """
 
        Initializes repository. Raises RepositoryError if the repository could
 
        not be found at the given ``repo_path``, or if a directory at
 
        ``repo_path`` already exists and ``create`` is set to True.
 

	
 
        :param repo_path: local path of the repository
 
        :param create=False: if set to True, would try to create repository.
 
        :param src_url=None: if set, should be proper url from which repository
 
          would be cloned; requires ``create`` parameter to be set to True -
 
          raises RepositoryError if src_url is set and create evaluates to
 
          False
 
        """
 
        raise NotImplementedError
 

	
 
    def __str__(self):
 
        return '<%s at %s>' % (self.__class__.__name__, self.path)
 

	
 
    def __repr__(self):
 
        return self.__str__()
 

	
 
    def __len__(self):
 
        return self.count()
 

	
 
    @LazyProperty
 
    def alias(self):
 
        for k, v in settings.BACKENDS.items():
 
            if v.split('.')[-1] == str(self.__class__.__name__):
 
                return k
 

	
 
    @LazyProperty
 
    def name(self):
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def owner(self):
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def description(self):
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def size(self):
 
        """
 
        Returns combined size in bytes for all repository files
 
        """
 

	
 
        size = 0
 
        try:
 
            tip = self.get_changeset()
 
            for topnode, dirs, files in tip.walk('/'):
 
                for f in files:
 
                    size += tip.get_file_size(f.path)
 

	
 
        except RepositoryError, e:
 
            pass
 
        return size
 

	
 
    def is_valid(self):
 
        """
 
        Validates repository.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_last_change(self):
 
        self.get_changesets()
 

	
 
    #==========================================================================
 
    # CHANGESETS
 
    #==========================================================================
 

	
 
    def get_changeset(self, revision=None):
 
        """
 
        Returns instance of ``Changeset`` class. If ``revision`` is None, most
 
        recent changeset is returned.
 

	
 
        :raises ``EmptyRepositoryError``: if there are no revisions
 
        """
 
        raise NotImplementedError
 

	
 
    def __iter__(self):
 
        """
 
        Allows Repository objects to be iterated.
 

	
 
        *Requires* implementation of ``__getitem__`` method.
 
        """
 
        for revision in self.revisions:
 
            yield self.get_changeset(revision)
 

	
 
    def get_changesets(self, start=None, end=None, start_date=None,
 
                       end_date=None, branch_name=None, reverse=False):
 
        """
 
        Returns iterator of ``Changeset`` objects from start to end, not
 
        inclusive. This should behave just like a list, i.e. end is not
 
        inclusive.
 

	
 
        :param start: None or str
 
        :param end: None or str
 
        :param start_date:
 
        :param end_date:
 
        :param branch_name:
 
        :param reverse:
 
        """
 
        raise NotImplementedError
 

	
 
    def __getslice__(self, i, j):
 
        """
 
        Returns an iterator over the sliced repository
 
        """
 
        for rev in self.revisions[i:j]:
 
            yield self.get_changeset(rev)
 

	
 
    def __getitem__(self, key):
 
        return self.get_changeset(key)
 

	
 
    def count(self):
 
        return len(self.revisions)
 

	
 
    def tag(self, name, user, revision=None, message=None, date=None, **opts):
 
        """
 
        Creates and returns a tag for the given ``revision``.
 

	
 
        :param name: name for new tag
 
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
 
        :param revision: changeset id for which new tag would be created
 
        :param message: message of the tag's commit
 
        :param date: date of tag's commit
 

	
 
        :raises TagAlreadyExistError: if tag with same name already exists
 
        """
 
        raise NotImplementedError
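
    # A minimal usage sketch for the tag/remove_tag API (illustrative only --
    # assumes a concrete backend instance; the tag name and user string are
    # made-up examples, not values from this codebase):
    #
    #   repo.tag('v1.0.0', u'Joe Doe <joe.doe@example.com>',
    #            message='Release v1.0.0')
    #   repo.remove_tag('v1.0.0', u'Joe Doe <joe.doe@example.com>')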
 

	
 
    def remove_tag(self, name, user, message=None, date=None):
 
        """
 
        Removes tag with the given ``name``.
 

	
 
        :param name: name of the tag to be removed
 
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
 
        :param message: message of the tag's removal commit
 
        :param date: date of tag's removal commit
 

	
 
        :raises TagDoesNotExistError: if tag with given name does not exist
 
        """
 
        raise NotImplementedError
 

	
 
    def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
 
            context=3):
 
        """
 
        Returns (git like) *diff*, as plain text. Shows changes introduced by
 
        ``rev2`` since ``rev1``.
 

	
 
        :param rev1: Entry point from which diff is shown. Can be
 
          ``self.EMPTY_CHANGESET`` - in this case, patch showing all
 
          the changes since empty state of the repository until ``rev2``
 
        :param rev2: Until which revision changes should be shown.
 
        :param ignore_whitespace: If set to ``True``, would not show whitespace
 
          changes. Defaults to ``False``.
 
        :param context: How many lines before/after changed lines should be
 
          shown. Defaults to ``3``.
 
        """
 
        raise NotImplementedError
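
    # A hedged example of how get_diff() is meant to be called (assumes a
    # concrete backend instance; the revision ids and path are placeholders):
    #
    #   # full patch from the empty state up to a given revision
    #   patch = repo.get_diff(repo.EMPTY_CHANGESET, 'tip')
    #   # diff of a single file between two revisions, with more context
    #   patch = repo.get_diff('rev1', 'rev2', path='setup.py', context=5)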
 

	
 
    # ========== #
 
    # COMMIT API #
 
    # ========== #
 

	
 
    @LazyProperty
 
    def in_memory_changeset(self):
 
        """
 
        Returns ``InMemoryChangeset`` object for this repository.
 
        """
 
        raise NotImplementedError
 

	
 
    def add(self, filenode, **kwargs):
 
        """
 
        Commit api function that will add given ``FileNode`` into this
 
        repository.
 

	
 
        :raises ``NodeAlreadyExistsError``: if there is a file with same path
 
          already in repository
 
        :raises ``NodeAlreadyAddedError``: if given node is already marked as
 
          *added*
 
        """
 
        raise NotImplementedError
 

	
 
    def remove(self, filenode, **kwargs):
 
        """
 
        Commit api function that will remove given ``FileNode`` from this
 
        repository.
 

	
 
        :raises ``EmptyRepositoryError``: if there are no changesets yet
 
        :raises ``NodeDoesNotExistError``: if there is no file with given path
 
        """
 
        raise NotImplementedError
 

	
 
    def commit(self, message, **kwargs):
 
        """
 
        Persists current changes made on this repository and returns newly
 
        created changeset.
 

	
 
        :raises ``NothingChangedError``: if no changes have been made
 
        """
 
        raise NotImplementedError
 

	
 
    def get_state(self):
 
        """
 
        Returns dictionary with ``added``, ``changed`` and ``removed`` lists
 
        containing ``FileNode`` objects.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_config_value(self, section, name, config_file=None):
 
        """
 
        Returns configuration value for a given [``section``] and ``name``.
 

	
 
        :param section: Section we want to retrieve value from
 
        :param name: Name of configuration we want to retrieve
 
        :param config_file: A path to file which should be used to retrieve
 
          configuration from (might also be a list of file paths)
 
        """
 
        raise NotImplementedError
 

	
 
    def get_user_name(self, config_file=None):
 
        """
 
        Returns user's name from global configuration file.
 

	
 
        :param config_file: A path to file which should be used to retrieve
 
          configuration from (might also be a list of file paths)
 
        """
 
        raise NotImplementedError
 

	
 
    def get_user_email(self, config_file=None):
 
        """
 
        Returns user's email from global configuration file.
 

	
 
        :param config_file: A path to file which should be used to retrieve
 
          configuration from (might also be a list of file paths)
 
        """
 
        raise NotImplementedError
 

	
 
    # =========== #
 
    # WORKDIR API #
 
    # =========== #
 

	
 
    @LazyProperty
 
    def workdir(self):
 
        """
 
        Returns ``Workdir`` instance for this repository.
 
        """
 
        raise NotImplementedError
 

	
 

	
 
class BaseChangeset(object):
 
    """
 
    Each backend should implement its changeset representation.
 

	
 
    **Attributes**
 

	
 
        ``repository``
 
            repository object within which changeset exists
 

	
 
        ``id``
 
            may be ``raw_id`` or, e.g. for mercurial's tip, just ``tip``
 

	
 
        ``raw_id``
 
            raw changeset representation (i.e. full 40 length sha for git
 
            backend)
 

	
 
        ``short_id``
 
            shortened (if applicable) version of ``raw_id``; it would be a
 
            simple shortcut for ``raw_id[:12]`` for git/mercurial backends,
 
            or the same as ``raw_id`` for subversion
 

	
 
        ``revision``
 
            revision number as integer
 

	
 
        ``files``
 
            list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
 

	
 
        ``dirs``
 
            list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
 

	
 
        ``nodes``
 
            combined list of ``Node`` objects
 

	
 
        ``author``
 
            author of the changeset, as unicode
 

	
 
        ``message``
 
            message of the changeset, as unicode
 

	
 
        ``parents``
 
            list of parent changesets
 

	
 
        ``last``
 
            ``True`` if this is last changeset in repository, ``False``
 
            otherwise; trying to access this attribute when there are no
 
            changesets would raise ``EmptyRepositoryError``
 
    """
 
    def __str__(self):
 
        return '<%s at %s:%s>' % (self.__class__.__name__, self.revision,
 
            self.short_id)
 

	
 
    def __repr__(self):
 
        return self.__str__()
 

	
 
    def __unicode__(self):
 
        return u'%s:%s' % (self.revision, self.short_id)
 

	
 
    def __eq__(self, other):
 
        return self.raw_id == other.raw_id
 

	
 
    def __json__(self):
 
        return dict(
 
            short_id=self.short_id,
 
            raw_id=self.raw_id,
 
            revision=self.revision,
 
            message=self.message,
 
            date=self.date,
 
            author=self.author,
 
        )
 

	
 
    @LazyProperty
 
    def last(self):
 
        if self.repository is None:
 
            raise ChangesetError("Cannot check if it's most recent revision")
 
        return self.raw_id == self.repository.revisions[-1]
 

	
 
    @LazyProperty
 
    def parents(self):
 
        """
 
        Returns list of parents changesets.
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def children(self):
 
        """
 
        Returns list of children changesets.
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def id(self):
 
        """
 
        Returns string identifying this changeset.
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def raw_id(self):
 
        """
 
        Returns raw string identifying this changeset.
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def short_id(self):
 
        """
 
        Returns shortened version of ``raw_id`` attribute, as string,
 
        identifying this changeset, useful for web representation.
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def revision(self):
 
        """
 
        Returns integer identifying this changeset.
 

	
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def commiter(self):
 
        """
 
        Returns committer of given commit
 
        """
 

	
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def commiter_name(self):
 
        """
 
        Returns committer name for given commit
 
        """
 

	
 
        return author_name(self.commiter)
 

	
 
    @LazyProperty
 
    def commiter_email(self):
 
        """
 
        Returns committer email address for given commit
 
        """
 

	
 
        return author_email(self.commiter)
 

	
 
    @LazyProperty
 
    def author(self):
 
        """
 
        Returns Author for given commit
 
        """
 

	
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def author_name(self):
 
        """
 
        Returns Author name for given commit
 
        """
 

	
 
        return author_name(self.author)
 

	
 
    @LazyProperty
 
    def author_email(self):
 
        """
 
        Returns Author email address for given commit
 
        """
 

	
 
        return author_email(self.author)
 

	
 
    def get_file_mode(self, path):
 
        """
 
        Returns stat mode of the file at the given ``path``.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_file_content(self, path):
 
        """
 
        Returns content of the file at the given ``path``.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_file_size(self, path):
 
        """
 
        Returns size of the file at the given ``path``.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_file_changeset(self, path):
 
        """
 
        Returns last commit of the file at the given ``path``.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_file_history(self, path):
 
        """
 
        Returns history of the file as a reversed list of ``Changeset``
 
        objects in which the file at the given ``path`` was modified.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_nodes(self, path):
 
        """
 
        Returns a combined list of ``DirNode`` and ``FileNode`` objects
 
        representing the state of the changeset at the given ``path``.
 

	
 
        :raises ``ChangesetError``: if node at the given ``path`` is not
 
          an instance of ``DirNode``
 
        """
 
        raise NotImplementedError
 

	
 
    def get_node(self, path):
 
        """
 
        Returns ``Node`` object from the given ``path``.
 

	
 
        :raises ``NodeDoesNotExistError``: if there is no node at the given
 
          ``path``
 
        """
 
        raise NotImplementedError
 

	
 
    def fill_archive(self, stream=None, kind='tgz', prefix=None):
 
        """
 
        Fills up given stream with archive data.
 

	
 
        :param stream: file like object.
 
        :param kind: one of following: ``zip``, ``tar``, ``tgz``
 
            or ``tbz2``. Default: ``tgz``.
 
        :param prefix: name of root directory in archive.
 
            Default is repository name and changeset's raw_id joined with dash.
 

	
 
            repo-tip.<kind>
 
        """
 

	
 
        raise NotImplementedError
 

	
 
    def get_chunked_archive(self, **kwargs):
 
        """
 
        Returns iterable archive. Tiny wrapper around ``fill_archive`` method.
 

	
 
        :param chunk_size: extra parameter which controls size of returned
 
            chunks. Default: 8k.
 
        """
 

	
 
        chunk_size = kwargs.pop('chunk_size', 8192)
 
        stream = kwargs.get('stream')
 
        self.fill_archive(**kwargs)
 
        while True:
 
            data = stream.read(chunk_size)
 
            if not data:
 
                break
 
            yield data
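
    # A minimal sketch of streaming an archive in chunks (assuming a concrete
    # backend changeset and a seekable temporary file; ``response`` below is a
    # hypothetical file-like consumer, not part of this API):
    #
    #   import tempfile
    #   stream = tempfile.NamedTemporaryFile()
    #   for chunk in changeset.get_chunked_archive(stream=stream, kind='tgz'):
    #       response.write(chunk)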
 

	
 
    @LazyProperty
 
    def root(self):
 
        """
 
        Returns ``RootNode`` object for this changeset.
 
        """
 
        return self.get_node('')
 

	
 
    def next(self, branch=None):
 
        """
 
        Returns next changeset from current; if ``branch`` is given it will
 
        return the next changeset belonging to this branch
 

	
 
        :param branch: show changesets within the given named branch
 
        """
 
        raise NotImplementedError
 

	
 
    def prev(self, branch=None):
 
        """
 
        Returns previous changeset from current; if ``branch`` is given it
 
        will return the previous changeset belonging to this branch
 

	
 
        :param branch: show changesets within the given named branch
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def added(self):
 
        """
 
        Returns list of added ``FileNode`` objects.
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def changed(self):
 
        """
 
        Returns list of modified ``FileNode`` objects.
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def removed(self):
 
        """
 
        Returns list of removed ``FileNode`` objects.
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def size(self):
 
        """
 
        Returns total number of bytes from contents of all filenodes.
 
        """
 
        return sum((node.size for node in self.get_filenodes_generator()))
 

	
 
    def walk(self, topurl=''):
 
        """
 
        Similar to the os.walk method. Instead of the filesystem, it walks
 
        through the changeset starting at the given ``topurl``. Returns a
 
        generator of (topnode, dirnodes, filenodes) tuples.
 
        """
 
        topnode = self.get_node(topurl)
 
        yield (topnode, topnode.dirs, topnode.files)
 
        for dirnode in topnode.dirs:
 
            for tup in self.walk(dirnode.path):
 
                yield tup
 

	
 
    def get_filenodes_generator(self):
 
        """
 
        Returns generator that yields *all* file nodes.
 
        """
 
        for topnode, dirs, files in self.walk():
 
            for node in files:
 
                yield node
 

	
 
    def as_dict(self):
 
        """
 
        Returns dictionary with changeset's attributes and their values.
 
        """
 
        data = get_dict_for_attrs(self, ['id', 'raw_id', 'short_id',
 
            'revision', 'date', 'message'])
 
        data['author'] = {'name': self.author_name, 'email': self.author_email}
 
        data['added'] = [node.path for node in self.added]
 
        data['changed'] = [node.path for node in self.changed]
 
        data['removed'] = [node.path for node in self.removed]
 
        return data
 

	
 

	
 
class BaseWorkdir(object):
 
    """
 
    Working directory representation of single repository.
 

	
 
    :attribute: repository: repository object of working directory
 
    """
 

	
 
    def __init__(self, repository):
 
        self.repository = repository
 

	
 
    def get_branch(self):
 
        """
 
        Returns name of current branch.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_changeset(self):
 
        """
 
        Returns current changeset.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_added(self):
 
        """
 
        Returns list of ``FileNode`` objects marked as *new* in working
 
        directory.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_changed(self):
 
        """
 
        Returns list of ``FileNode`` objects *changed* in working directory.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_removed(self):
 
        """
 
        Returns list of ``RemovedFileNode`` objects marked as *removed* in
 
        working directory.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_untracked(self):
 
        """
 
        Returns list of ``FileNode`` objects which are present within working
 
        directory however are not tracked by repository.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_status(self):
 
        """
 
        Returns dict with ``added``, ``changed``, ``removed`` and ``untracked``
 
        lists.
 
        """
 
        raise NotImplementedError
 

	
 
    def commit(self, message, **kwargs):
 
        """
 
        Commits local (from working directory) changes and returns newly
 
        created ``Changeset``. Updates repository's ``revisions`` list.
 

	
 
        :raises ``CommitError``: if any error occurs while committing
 
        """
 
        raise NotImplementedError
 

	
 
    def update(self, revision=None):
 
        """
 
        Fetches content of the given revision and populates it within working
 
        directory.
 
        """
 
        raise NotImplementedError
 

	
 
    def checkout_branch(self, branch=None):
 
        """
 
        Checks out ``branch`` or the backend's default branch.
 

	
 
        Raises ``BranchDoesNotExistError`` if the branch does not exist.
 
        """
 
        raise NotImplementedError
 

	
 

	
 
class BaseInMemoryChangeset(object):
 
    """
 
    Represents differences between repository's state (most recent head) and
 
    changes made *in place*.
 

	
 
    **Attributes**
 

	
 
        ``repository``
 
            repository object for this in-memory-changeset
 

	
 
        ``added``
 
            list of ``FileNode`` objects marked as *added*
 

	
 
        ``changed``
 
            list of ``FileNode`` objects marked as *changed*
 

	
 
        ``removed``
 
            list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
 
            *removed*
 

	
 
        ``parents``
 
            list of ``Changeset`` representing parents of in-memory changeset.
 
            Should always be 2-element sequence.
 

	
 
    """
 

	
 
    def __init__(self, repository):
 
        self.repository = repository
 
        self.added = []
 
        self.changed = []
 
        self.removed = []
 
        self.parents = []
 

	
 
    def add(self, *filenodes):
 
        """
 
        Marks given ``FileNode`` objects as *to be committed*.
 

	
 
        :raises ``NodeAlreadyExistsError``: if node with same path exists at
 
          latest changeset
 
        :raises ``NodeAlreadyAddedError``: if node with same path is already
 
          marked as *added*
 
        """
 
        # Check if not already marked as *added* first
 
        for node in filenodes:
 
            if node.path in (n.path for n in self.added):
 
                raise NodeAlreadyAddedError("Such FileNode %s is already "
 
                    "marked for addition" % node.path)
 
        for node in filenodes:
 
            self.added.append(node)
 

	
 
    def change(self, *filenodes):
 
        """
 
        Marks given ``FileNode`` objects to be *changed* in next commit.
 

	
 
        :raises ``EmptyRepositoryError``: if there are no changesets yet
 
        :raises ``NodeAlreadyExistsError``: if node with same path is already
 
          marked to be *changed*
 
        :raises ``NodeAlreadyRemovedError``: if node with same path is already
 
          marked to be *removed*
 
        :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
 
          changeset
 
        :raises ``NodeNotChangedError``: if node hasn't really been changed
 
        """
 
        for node in filenodes:
 
            if node.path in (n.path for n in self.removed):
 
                raise NodeAlreadyRemovedError("Node at %s is already marked "
 
                    "as removed" % node.path)
 
        try:
 
            self.repository.get_changeset()
 
        except EmptyRepositoryError:
 
            raise EmptyRepositoryError("Nothing to change - try to *add* new "
 
                "nodes rather than changing them")
 
        for node in filenodes:
 
            if node.path in (n.path for n in self.changed):
 
                raise NodeAlreadyChangedError("Node at '%s' is already "
 
                    "marked as changed" % node.path)
 
            self.changed.append(node)
 

	
 
    def remove(self, *filenodes):
 
        """
 
        Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
 
        *removed* in next commit.
 

	
 
        :raises ``NodeAlreadyRemovedError``: if node has been already marked to
 
          be *removed*
 
        :raises ``NodeAlreadyChangedError``: if node has been already marked to
 
          be *changed*
 
        """
 
        for node in filenodes:
 
            if node.path in (n.path for n in self.removed):
 
                raise NodeAlreadyRemovedError("Node is already marked to "
 
                    "for removal at %s" % node.path)
 
            if node.path in (n.path for n in self.changed):
 
                raise NodeAlreadyChangedError("Node is already marked to "
 
                    "be changed at %s" % node.path)
 
            # We only mark node as *removed* - real removal is done by
 
            # commit method
 
            self.removed.append(node)
 

	
 
    def reset(self):
 
        """
 
        Resets this instance to initial state (cleans ``added``, ``changed``
 
        and ``removed`` lists).
 
        """
 
        self.added = []
 
        self.changed = []
 
        self.removed = []
 
        self.parents = []
 

	
 
    def get_ipaths(self):
 
        """
 
        Returns generator of paths from nodes marked as added, changed or
 
        removed.
 
        """
 
        for node in chain(self.added, self.changed, self.removed):
 
            yield node.path
 

	
 
    def get_paths(self):
 
        """
 
        Returns list of paths from nodes marked as added, changed or removed.
 
        """
 
        return list(self.get_ipaths())
 

	
 
    def check_integrity(self, parents=None):
 
        """
 
        Checks in-memory changeset's integrity. Also, sets parents if not
 
        already set.
 

	
 
        :raises CommitError: if any error occurs (i.e.
 
          ``NodeDoesNotExistError``).
 
        """
 
        if not self.parents:
 
            parents = parents or []
 
            if len(parents) == 0:
 
                try:
 
                    parents = [self.repository.get_changeset(), None]
 
                except EmptyRepositoryError:
 
                    parents = [None, None]
 
            elif len(parents) == 1:
 
                parents += [None]
 
            self.parents = parents
 

	
 
        # Local parents, only if not None
 
        parents = [p for p in self.parents if p]
 

	
 
        # Check nodes marked as added
 
        for p in parents:
 
            for node in self.added:
 
                try:
 
                    p.get_node(node.path)
 
                except NodeDoesNotExistError:
 
                    pass
 
                else:
 
                    raise NodeAlreadyExistsError("Node at %s already exists "
 
                        "at %s" % (node.path, p))
 

	
 
        # Check nodes marked as changed
 
        missing = set(self.changed)
 
        not_changed = set(self.changed)
 
        if self.changed and not parents:
 
            raise NodeDoesNotExistError(str(self.changed[0].path))
 
        for p in parents:
 
            for node in self.changed:
 
                try:
 
                    old = p.get_node(node.path)
 
                    missing.remove(node)
 
                    if old.content != node.content:
 
                        not_changed.remove(node)
 
                except NodeDoesNotExistError:
 
                    pass
 
        if self.changed and missing:
 
            raise NodeDoesNotExistError("Node at %s is missing "
 
                "(parents: %s)" % (node.path, parents))
 

	
 
        if self.changed and not_changed:
 
            raise NodeNotChangedError("Node at %s wasn't actually changed "
 
                "since parents' changesets: %s" % (not_changed.pop().path,
 
                    parents)
 
            )
 

	
 
        # Check nodes marked as removed
 
        if self.removed and not parents:
 
            raise NodeDoesNotExistError("Cannot remove node at %s as there "
 
                "were no parents specified" % self.removed[0].path)
 
        really_removed = set()
 
        for p in parents:
 
            for node in self.removed:
 
                try:
 
                    p.get_node(node.path)
 
                    really_removed.add(node)
 
                except ChangesetError:
 
                    pass
 
        not_removed = set(self.removed) - really_removed
 
        if not_removed:
 
            raise NodeDoesNotExistError("Cannot remove node at %s from "
 
                "following parents: %s" % (not_removed[0], parents))
 

	
 
    def commit(self, message, author, parents=None, branch=None, date=None,
 
            **kwargs):
 
        """
 
        Performs in-memory commit (doesn't check workdir in any way) and
 
        returns newly created ``Changeset``. Updates repository's
 
        ``revisions``.
 

	
 
        .. note::
 
            While overriding this method each backend should call
 
            ``self.check_integrity(parents)`` in the first place.
 

	
 
        :param message: message of the commit
 
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
 
        :param parents: single parent or sequence of parents from which commit
 
          would be derived
 
        :param date: ``datetime.datetime`` instance. Defaults to
 
          ``datetime.datetime.now()``.
 
        :param branch: branch name, as string. If none given, default backend's
 
          branch would be used.
 

	
 
        :raises ``CommitError``: if any error occurs while committing
 
        """
 
        raise NotImplementedError
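
    # A hedged sketch of the in-memory commit flow (assumes a concrete backend
    # repository and the FileNode class from rhodecode.lib.vcs.nodes; the file
    # path, content, author and message are illustrative values only):
    #
    #   from rhodecode.lib.vcs.nodes import FileNode
    #   imc = repo.in_memory_changeset
    #   imc.add(FileNode('docs/readme.rst', content='hello'))
    #   tip = imc.commit(message=u'Added readme',
    #                    author=u'Joe Doe <joe.doe@example.com>')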
 

	
 

	
 
class EmptyChangeset(BaseChangeset):
 
    """
 
    A dummy empty changeset. It's possible to pass a hash when creating
 
    an EmptyChangeset.
 
    """
 

	
 
    def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
 
                 alias=None, revision=-1, message='', author='', date=''):
 
        self._empty_cs = cs
 
        self.revision = revision
 
        self.message = message
 
        self.author = author
 
        self.date = date
 
        self.repository = repo
 
        self.requested_revision = requested_revision
 
        self.alias = alias
 

	
 
    @LazyProperty
 
    def raw_id(self):
 
        """
 
        Returns raw string identifying this changeset, useful for web
 
        representation.
 
        """
 

	
 
        return self._empty_cs
 

	
 
    @LazyProperty
 
    def branch(self):
 
        from rhodecode.lib.vcs.backends import get_backend
 
        return get_backend(self.alias).DEFAULT_BRANCH_NAME
 

	
 
    @LazyProperty
 
    def short_id(self):
 
        return self.raw_id[:12]
 

	
 
    def get_file_changeset(self, path):
 
        return self
 

	
 
    def get_file_content(self, path):
 
        return u''
 

	
 
    def get_file_size(self, path):
 
        return 0
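
# A short illustration of what EmptyChangeset is for (a hedged sketch, not
# code from this module): it acts as a null object wherever a changeset is
# expected but the repository has no commits yet, e.g.:
#
#   cs = EmptyChangeset(alias='hg')
#   cs.raw_id      # '0000000000000000000000000000000000000000'
#   cs.short_id    # '000000000000'
#   cs.branch      # backend's DEFAULT_BRANCH_NAME, i.e. 'default' for hg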
rhodecode/model/db.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.model.db
 
    ~~~~~~~~~~~~~~~~~~
 

	
 
    Database Models for RhodeCode
 

	
 
    :created_on: Apr 08, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import os
 
import logging
 
import datetime
 
import traceback
 
import hashlib
 
import time
 
from collections import defaultdict
 

	
 
from sqlalchemy import *
 
from sqlalchemy.ext.hybrid import hybrid_property
 
from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
 
from sqlalchemy.exc import DatabaseError
 
from beaker.cache import cache_region, region_invalidate
 
from webob.exc import HTTPNotFound
 

	
 
from pylons.i18n.translation import lazy_ugettext as _
 

	
 
from rhodecode.lib.vcs import get_backend
 
from rhodecode.lib.vcs.utils.helpers import get_scm
 
from rhodecode.lib.vcs.exceptions import VCSError
 
from rhodecode.lib.vcs.utils.lazy import LazyProperty
 

	
 
from rhodecode.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
 
    safe_unicode, remove_suffix, remove_prefix
 
from rhodecode.lib.compat import json
 
from rhodecode.lib.caching_query import FromCache
 

	
 
from rhodecode.model.meta import Base, Session
 

	
 
URL_SEP = '/'
 
log = logging.getLogger(__name__)
 

	
 
#==============================================================================
 
# BASE CLASSES
 
#==============================================================================
 

	
 
_hash_key = lambda k: hashlib.md5(safe_str(k)).hexdigest()
 

	
 

	
 
class BaseModel(object):
 
    """
 
    Base Model for all classes
 
    """
 

	
 
    @classmethod
 
    def _get_keys(cls):
 
        """return column names for this model """
 
        return class_mapper(cls).c.keys()
 

	
 
    def get_dict(self):
 
        """
 
        return dict with keys and values corresponding
 
        to this model data """
 

	
 
        d = {}
 
        for k in self._get_keys():
 
            d[k] = getattr(self, k)
 

	
 
        # also use __json__() if present to get additional fields
 
        _json_attr = getattr(self, '__json__', None)
 
        if _json_attr:
 
            # update with attributes from __json__
 
            if callable(_json_attr):
 
                _json_attr = _json_attr()
 
            for k, val in _json_attr.iteritems():
 
                d[k] = val
 
        return d
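
    # For instance (a hedged sketch, not from this module): a model defining
    #
    #   def __json__(self):
    #       return {'full_name': self.full_name}
    #
    # gets that key merged into the result of get_dict() alongside the plain
    # column values.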
 

	
 
    def get_appstruct(self):
 
        """return list with keys and values tupples corresponding
 
        to this model data """
 

	
 
        l = []
 
        for k in self._get_keys():
 
            l.append((k, getattr(self, k),))
 
        return l
 

	
 
    def populate_obj(self, populate_dict):
 
        """populate model with data from given populate_dict"""
 

	
 
        for k in self._get_keys():
 
            if k in populate_dict:
 
                setattr(self, k, populate_dict[k])
 

	
 
    @classmethod
 
    def query(cls):
 
        return Session().query(cls)
 

	
 
    @classmethod
 
    def get(cls, id_):
 
        if id_:
 
            return cls.query().get(id_)
 

	
 
    @classmethod
 
    def get_or_404(cls, id_):
 
        try:
 
            id_ = int(id_)
 
        except (TypeError, ValueError):
 
            raise HTTPNotFound
 

	
 
        res = cls.query().get(id_)
 
        if not res:
 
            raise HTTPNotFound
 
        return res
 

	
 
    @classmethod
 
    def getAll(cls):
 
        return cls.query().all()
 

	
 
    @classmethod
 
    def delete(cls, id_):
 
        obj = cls.query().get(id_)
 
        Session().delete(obj)
 

	
 
    def __repr__(self):
 
        if hasattr(self, '__unicode__'):
 
            # python repr needs to return str
 
            return safe_str(self.__unicode__())
 
        return '<DB:%s>' % (self.__class__.__name__)
 

	
 

	
 
class RhodeCodeSetting(Base, BaseModel):
 
    __tablename__ = 'rhodecode_settings'
 
    __table_args__ = (
 
        UniqueConstraint('app_settings_name'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    app_settings_name = Column("app_settings_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    _app_settings_value = Column("app_settings_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 

	
 
    def __init__(self, k='', v=''):
 
        self.app_settings_name = k
 
        self.app_settings_value = v
 

	
 
    @validates('_app_settings_value')
 
    def validate_settings_value(self, key, val):
 
        assert type(val) == unicode
 
        return val
 

	
 
    @hybrid_property
 
    def app_settings_value(self):
 
        v = self._app_settings_value
 
        if self.app_settings_name in ["ldap_active",
 
                                      "default_repo_enable_statistics",
 
                                      "default_repo_enable_locking",
 
                                      "default_repo_private",
 
                                      "default_repo_enable_downloads"]:
 
            v = str2bool(v)
 
        return v
 

	
 
    @app_settings_value.setter
 
    def app_settings_value(self, val):
 
        """
 
        Setter that will always make sure we use unicode in app_settings_value
 

	
 
        :param val:
 
        """
 
        self._app_settings_value = safe_unicode(val)
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (
 
            self.__class__.__name__,
 
            self.app_settings_name, self.app_settings_value
 
        )
 

	
 
    @classmethod
 
    def get_by_name(cls, key):
 
        return cls.query()\
 
            .filter(cls.app_settings_name == key).scalar()
 

	
 
    @classmethod
 
    def get_by_name_or_create(cls, key):
 
        res = cls.get_by_name(key)
 
        if not res:
 
            res = cls(key)
 
        return res
 

	
 
    @classmethod
 
    def get_app_settings(cls, cache=False):
 

	
 
        ret = cls.query()
 

	
 
        if cache:
 
            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
 

	
 
        if not ret:
 
            raise Exception('Could not get application settings !')
 
        settings = {}
 
        for each in ret:
 
            settings['rhodecode_' + each.app_settings_name] = \
 
                each.app_settings_value
 

	
 
        return settings
 

	
 
    @classmethod
 
    def get_ldap_settings(cls, cache=False):
 
        ret = cls.query()\
 
                .filter(cls.app_settings_name.startswith('ldap_')).all()
 
        fd = {}
 
        for row in ret:
 
            fd.update({row.app_settings_name: row.app_settings_value})
 

	
 
        return fd
 

	
 
    @classmethod
 
    def get_default_repo_settings(cls, cache=False, strip_prefix=False):
 
        ret = cls.query()\
 
                .filter(cls.app_settings_name.startswith('default_')).all()
 
        fd = {}
 
        for row in ret:
 
            key = row.app_settings_name
 
            if strip_prefix:
 
                key = remove_prefix(key, prefix='default_')
 
            fd.update({key: row.app_settings_value})
 

	
 
        return fd
 

	
 

	
 
class RhodeCodeUi(Base, BaseModel):
 
    __tablename__ = 'rhodecode_ui'
 
    __table_args__ = (
 
        UniqueConstraint('ui_key'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 

	
 
    HOOK_UPDATE = 'changegroup.update'
 
    HOOK_REPO_SIZE = 'changegroup.repo_size'
 
    HOOK_PUSH = 'changegroup.push_logger'
 
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
 
    HOOK_PULL = 'outgoing.pull_logger'
 
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
 

	
 
    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    ui_section = Column("ui_section", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    ui_key = Column("ui_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    ui_value = Column("ui_value", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
 

	
 
    @classmethod
 
    def get_by_key(cls, key):
 
        return cls.query().filter(cls.ui_key == key).scalar()
 

	
 
    @classmethod
 
    def get_builtin_hooks(cls):
 
        q = cls.query()
 
        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
 
                                     cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
 
                                     cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
 
        return q.all()
 

	
 
    @classmethod
 
    def get_custom_hooks(cls):
 
        q = cls.query()
 
        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
 
                                      cls.HOOK_PUSH, cls.HOOK_PRE_PUSH,
 
                                      cls.HOOK_PULL, cls.HOOK_PRE_PULL]))
 
        q = q.filter(cls.ui_section == 'hooks')
 
        return q.all()
 

	
 
    @classmethod
 
    def get_repos_location(cls):
 
        return cls.get_by_key('/').ui_value
 

	
 
    @classmethod
 
    def create_or_update_hook(cls, key, val):
 
        new_ui = cls.get_by_key(key) or cls()
 
        new_ui.ui_section = 'hooks'
 
        new_ui.ui_active = True
 
        new_ui.ui_key = key
 
        new_ui.ui_value = val
 

	
 
        Session().add(new_ui)
 

	
 
    def __repr__(self):
 
        return '<DB:%s[%s:%s]>' % (self.__class__.__name__, self.ui_key,
 
                                   self.ui_value)
 

	
 

	
 
class User(Base, BaseModel):
 
    __tablename__ = 'users'
 
    __table_args__ = (
 
        UniqueConstraint('username'), UniqueConstraint('email'),
 
        Index('u_username_idx', 'username'),
 
        Index('u_email_idx', 'email'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    DEFAULT_USER = 'default'
 
    DEFAULT_PERMISSIONS = [
 
        'hg.register.manual_activate', 'hg.create.repository',
 
        'hg.fork.repository', 'repository.read', 'group.read'
 
    ]
 
    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    password = Column("password", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
 
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
 
    name = Column("firstname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    lastname = Column("lastname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
 
    ldap_dn = Column("ldap_dn", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    api_key = Column("api_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
 

	
 
    user_log = relationship('UserLog')
 
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
 

	
 
    repositories = relationship('Repository')
 
    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
 
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
 

	
 
    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
 
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
 

	
 
    group_member = relationship('UsersGroupMember', cascade='all')
 

	
 
    notifications = relationship('UserNotification', cascade='all')
 
    # notifications assigned to this user
 
    user_created_notifications = relationship('Notification', cascade='all')
 
    # comments created by this user
 
    user_comments = relationship('ChangesetComment', cascade='all')
 
    #extra emails for this user
 
    user_emails = relationship('UserEmailMap', cascade='all')
 

	
 
    @hybrid_property
 
    def email(self):
 
        return self._email
 

	
 
    @email.setter
 
    def email(self, val):
 
        self._email = val.lower() if val else None
 

	
 
    @property
 
    def firstname(self):
 
        # alias for future
 
        return self.name
 

	
 
    @property
 
    def emails(self):
 
        other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
 
        return [self.email] + [x.email for x in other]
 

	
 
    @property
 
    def ip_addresses(self):
 
        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
 
        return [x.ip_addr for x in ret]
 

	
 
    @property
 
    def username_and_name(self):
 
        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
 

	
 
    @property
 
    def full_name(self):
 
        return '%s %s' % (self.firstname, self.lastname)
 

	
 
    @property
 
    def full_name_or_username(self):
 
        return ('%s %s' % (self.firstname, self.lastname)
 
                if (self.firstname and self.lastname) else self.username)
 

	
 
    @property
 
    def full_contact(self):
 
        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
 

	
 
    @property
 
    def short_contact(self):
 
        return '%s %s' % (self.firstname, self.lastname)
 

	
 
    @property
 
    def is_admin(self):
 
        return self.admin
 

	
 
    def __unicode__(self):
 
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
 
                                     self.user_id, self.username)
 

	
 
    @classmethod
 
    def get_by_username(cls, username, case_insensitive=False, cache=False):
 
        if case_insensitive:
 
            q = cls.query().filter(cls.username.ilike(username))
 
        else:
 
            q = cls.query().filter(cls.username == username)
 

	
 
        if cache:
 
            q = q.options(FromCache(
 
                            "sql_cache_short",
 
                            "get_user_%s" % _hash_key(username)
 
                          )
 
            )
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_api_key(cls, api_key, cache=False):
 
        q = cls.query().filter(cls.api_key == api_key)
 

	
 
        if cache:
 
            q = q.options(FromCache("sql_cache_short",
 
                                    "get_api_key_%s" % api_key))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_email(cls, email, case_insensitive=False, cache=False):
 
        if case_insensitive:
 
            q = cls.query().filter(cls.email.ilike(email))
 
        else:
 
            q = cls.query().filter(cls.email == email)
 

	
 
        if cache:
 
            q = q.options(FromCache("sql_cache_short",
 
                                    "get_email_key_%s" % email))
 

	
 
        ret = q.scalar()
 
        if ret is None:
 
            q = UserEmailMap.query()
 
            # try fetching in alternate email map
 
            if case_insensitive:
 
                q = q.filter(UserEmailMap.email.ilike(email))
 
            else:
 
                q = q.filter(UserEmailMap.email == email)
 
            q = q.options(joinedload(UserEmailMap.user))
 
            if cache:
 
                q = q.options(FromCache("sql_cache_short",
 
                                        "get_email_map_key_%s" % email))
 
            ret = getattr(q.scalar(), 'user', None)
 

	
 
        return ret
 

	
 
    def update_lastlogin(self):
 
        """Update user lastlogin"""
 
        self.last_login = datetime.datetime.now()
 
        Session().add(self)
 
        log.debug('updated user %s lastlogin' % self.username)
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating user related data for API
 
        """
 
        user = self
 
        data = dict(
 
            user_id=user.user_id,
 
            username=user.username,
 
            firstname=user.name,
 
            lastname=user.lastname,
 
            email=user.email,
 
            emails=user.emails,
 
            api_key=user.api_key,
 
            active=user.active,
 
            admin=user.admin,
 
            ldap_dn=user.ldap_dn,
 
            last_login=user.last_login,
 
            ip_addresses=user.ip_addresses
 
        )
 
        return data
 

	
 
    def __json__(self):
 
        data = dict(
 
            full_name=self.full_name,
 
            full_name_or_username=self.full_name_or_username,
 
            short_contact=self.short_contact,
 
            full_contact=self.full_contact
 
        )
 
        data.update(self.get_api_data())
 
        return data
 

	
 

	
 
class UserEmailMap(Base, BaseModel):
 
    __tablename__ = 'user_email_map'
 
    __table_args__ = (
 
        Index('uem_email_idx', 'email'),
 
        UniqueConstraint('email'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    __mapper_args__ = {}
 

	
 
    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    _email = Column("email", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
 
    user = relationship('User', lazy='joined')
 

	
 
    @validates('_email')
 
    def validate_email(self, key, email):
 
        # check if this email is not main one
 
        main_email = Session().query(User).filter(User.email == email).scalar()
 
        if main_email is not None:
 
            raise AttributeError('email %s is present in user table' % email)
 
        return email
 

	
 
    @hybrid_property
 
    def email(self):
 
        return self._email
 

	
 
    @email.setter
 
    def email(self, val):
 
        self._email = val.lower() if val else None
 

	
 

	
 
class UserIpMap(Base, BaseModel):
 
    __tablename__ = 'user_ip_map'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'ip_addr'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    __mapper_args__ = {}
 

	
 
    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    ip_addr = Column("ip_addr", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
 
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
 
    user = relationship('User', lazy='joined')
 

	
 
    @classmethod
 
    def _get_ip_range(cls, ip_addr):
 
        from rhodecode.lib import ipaddr
 
        net = ipaddr.IPv4Network(ip_addr)
 
        return [str(net.network), str(net.broadcast)]
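
    # For example (values are illustrative):
    #
    #   UserIpMap._get_ip_range('192.168.1.0/24')
    #   # -> ['192.168.1.0', '192.168.1.255']
    #
    # i.e. the network and broadcast addresses bounding the stored range.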
 

	
 
    def __json__(self):
 
        return dict(
 
          ip_addr=self.ip_addr,
 
          ip_range=self._get_ip_range(self.ip_addr)
 
        )
 

	
 

	
 
class UserLog(Base, BaseModel):
 
    __tablename__ = 'user_logs'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    username = Column("username", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
 
    repository_name = Column("repository_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    user_ip = Column("user_ip", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    action = Column("action", UnicodeText(1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
 

	
 
    @property
 
    def action_as_day(self):
 
        return datetime.date(*self.action_date.timetuple()[:3])
 

	
 
    user = relationship('User')
 
    repository = relationship('Repository', cascade='')
 

	
 

	
 
class UsersGroup(Base, BaseModel):
 
    __tablename__ = 'users_groups'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 

	
 
    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_name = Column("users_group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
 
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
 
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
 

	
 
    members = relationship('UsersGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
 
    users_group_to_perm = relationship('UsersGroupToPerm', cascade='all')
 
    users_group_repo_to_perm = relationship('UsersGroupRepoToPerm', cascade='all')
 

	
 
    def __unicode__(self):
 
        return u'<userGroup(%s)>' % (self.users_group_name)
 

	
 
    @classmethod
 
    def get_by_group_name(cls, group_name, cache=False,
 
                          case_insensitive=False):
 
        if case_insensitive:
 
            q = cls.query().filter(cls.users_group_name.ilike(group_name))
 
        else:
 
            q = cls.query().filter(cls.users_group_name == group_name)
 
        if cache:
 
            q = q.options(FromCache(
 
                            "sql_cache_short",
 
                            "get_user_%s" % _hash_key(group_name)
 
                          )
 
            )
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get(cls, users_group_id, cache=False):
 
        users_group = cls.query()
 
        if cache:
 
            users_group = users_group.options(FromCache("sql_cache_short",
 
                                    "get_users_group_%s" % users_group_id))
 
        return users_group.get(users_group_id)
 

	
 
    def get_api_data(self):
 
        users_group = self
 

	
 
        data = dict(
 
            users_group_id=users_group.users_group_id,
 
            group_name=users_group.users_group_name,
 
            active=users_group.users_group_active,
 
        )
 

	
 
        return data
 

	
 

	
 
class UsersGroupMember(Base, BaseModel):
 
    __tablename__ = 'users_groups_members'
 
    __table_args__ = (
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 

	
 
    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User', lazy='joined')
 
    users_group = relationship('UsersGroup')
 

	
 
    def __init__(self, gr_id='', u_id=''):
 
        self.users_group_id = gr_id
 
        self.user_id = u_id
 

	
 

	
 
class Repository(Base, BaseModel):
 
    __tablename__ = 'repositories'
 
    __table_args__ = (
 
        UniqueConstraint('repo_name'),
 
        Index('r_repo_name_idx', 'repo_name'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 

	
 
    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    repo_name = Column("repo_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
 
    clone_uri = Column("clone_uri", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
 
    repo_type = Column("repo_type", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
 
    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
 
    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
 
    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
 
    description = Column("description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
 
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
 
    landing_rev = Column("landing_revision", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default=None)
 
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
 
    _locked = Column("locked", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
 
    #changeset_cache = Column("changeset_cache", LargeBinary(), nullable=False) #JSON data
 
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) #JSON data
 

	
 
    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
 
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
 

	
 
    user = relationship('User')
 
    fork = relationship('Repository', remote_side=repo_id)
 
    group = relationship('RepoGroup')
 
    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
 
    users_group_to_perm = relationship('UsersGroupRepoToPerm', cascade='all')
 
    stats = relationship('Statistics', cascade='all', uselist=False)
 

	
 
    followers = relationship('UserFollowing',
 
                             primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
 
                             cascade='all')
 

	
 
    logs = relationship('UserLog')
 
    comments = relationship('ChangesetComment', cascade="all, delete, delete-orphan")
 

	
 
    pull_requests_org = relationship('PullRequest',
 
                    primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
 
                    cascade="all, delete, delete-orphan")
 

	
 
    pull_requests_other = relationship('PullRequest',
 
                    primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
 
                    cascade="all, delete, delete-orphan")
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
 
                                   self.repo_name)
 

	
 
    @hybrid_property
 
    def locked(self):
 
        # should always return a [user_id, time_locked] pair
 
        if self._locked:
 
            _lock_info = self._locked.split(':')
 
            return [int(_lock_info[0]), _lock_info[1]]
 
        return [None, None]
 

	
 
    @locked.setter
 
    def locked(self, val):
 
        if val and isinstance(val, (list, tuple)):
 
            self._locked = ':'.join(map(str, val))
 
        else:
 
            self._locked = None
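    # A usage sketch (hypothetical values) for the locked hybrid property
    # above: the lock is persisted as a 'user_id:timestamp' string and read
    # back as a [user_id, time_locked] pair.
    #
    #   >>> repo.locked = [2, 1357344000.0]   # assumed user id / unix time
    #   >>> repo._locked
    #   '2:1357344000.0'
    #   >>> repo.locked
    #   [2, '1357344000.0']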
 

	
 
    @hybrid_property
 
    def changeset_cache(self):
 
        from rhodecode.lib.vcs.backends.base import EmptyChangeset
 
        dummy = EmptyChangeset().__json__()
 
        if not self._changeset_cache:
 
            return dummy
 
        try:
 
            return json.loads(self._changeset_cache)
 
        except TypeError:
 
            return dummy
 

	
 
    @changeset_cache.setter
 
    def changeset_cache(self, val):
 
        try:
 
            self._changeset_cache = json.dumps(val)
 
        except Exception:
 
            log.error(traceback.format_exc())
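    # A rough sketch (hypothetical values) of what the changeset_cache
    # property above stores: the last-changeset info is kept as JSON text in
    # the 'changeset_cache' column, so the dashboard can show it without
    # opening the repository on disk.
    #
    #   >>> repo.changeset_cache = {'short_id': 'abc123def456',
    #   ...                         'raw_id': 'abc123def4567890...',
    #   ...                         'revision': 42,
    #   ...                         'message': 'fix typo',
    #   ...                         'date': '2013-01-05 01:58:29',
    #   ...                         'author': 'marcink'}
    #   >>> repo.changeset_cache['revision']
    #   42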
 

	
 
    @classmethod
 
    def url_sep(cls):
 
        return URL_SEP
 

	
 
    @classmethod
 
    def get_by_repo_name(cls, repo_name):
 
        q = Session().query(cls).filter(cls.repo_name == repo_name)
 
        q = q.options(joinedload(Repository.fork))\
 
                .options(joinedload(Repository.user))\
 
                .options(joinedload(Repository.group))
 
        return q.scalar()
 

	
 
    @classmethod
 
    def get_by_full_path(cls, repo_full_path):
 
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
 
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
 

	
 
    @classmethod
 
    def get_repo_forks(cls, repo_id):
 
        return cls.query().filter(Repository.fork_id == repo_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns the base path where all repos are stored
 

	
 
        :param cls:
 
        """
 
        q = Session().query(RhodeCodeUi)\
 
            .filter(RhodeCodeUi.ui_key == cls.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def forks(self):
 
        """
 
        Return forks of this repo
 
        """
 
        return Repository.get_repo_forks(self.repo_id)
 

	
 
    @property
 
    def parent(self):
 
        """
 
        Returns fork parent
 
        """
 
        return self.fork
 

	
 
    @property
 
    def just_name(self):
 
        return self.repo_name.split(Repository.url_sep())[-1]
 

	
 
    @property
 
    def groups_with_parents(self):
 
        groups = []
 
        if self.group is None:
 
            return groups
 

	
 
        cur_gr = self.group
 
        groups.insert(0, cur_gr)
 
        while 1:
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            groups.insert(0, gr)
 

	
 
        return groups
 

	
 
    @property
 
    def groups_and_repo(self):
 
        return self.groups_with_parents, self.just_name
 

	
 
    @LazyProperty
 
    def repo_path(self):
 
        """
 
        Returns the full base path for this repository, i.e. where it
 
        actually exists on the filesystem
 
        """
 
        q = Session().query(RhodeCodeUi).filter(RhodeCodeUi.ui_key ==
 
                                              Repository.url_sep())
 
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def repo_full_path(self):
 
        p = [self.repo_path]
 
        # we need to split the name by / since this is how we store the
 
        # names in the database, but that eventually needs to be converted
 
        # into a valid system path
 
        p += self.repo_name.split(Repository.url_sep())
 
        return os.path.join(*p)
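    # Sketch (hypothetical paths) of how repo_full_path above is composed:
    # the URL-style name stored in the database is joined onto the
    # filesystem base path.
    #
    #   >>> repo.repo_path
    #   '/srv/repos'
    #   >>> repo.repo_name
    #   'group/subgroup/repo'
    #   >>> repo.repo_full_path
    #   '/srv/repos/group/subgroup/repo'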
 

	
 
    @property
 
    def cache_keys(self):
 
        """
 
        Returns associated cache keys for that repo
 
        """
 
        return CacheInvalidation.query()\
 
            .filter(CacheInvalidation.cache_args == self.repo_name)\
 
            .order_by(CacheInvalidation.cache_key)\
 
            .all()
 

	
 
    def get_new_name(self, repo_name):
 
        """
 
        returns new full repository name based on assigned group and new name
 

	
 
        :param repo_name:
 
        """
 
        path_prefix = self.group.full_path_splitted if self.group else []
 
        return Repository.url_sep().join(path_prefix + [repo_name])
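    # Sketch (hypothetical names): renaming a repository that lives inside a
    # group keeps the group prefix, e.g. for a repo under 'docs/manuals',
    # get_new_name('guide') would return 'docs/manuals/guide'.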
 

	
 
    @property
 
    def _ui(self):
 
        """
 
        Creates a db-based ui object for this repository
 
        """
 
        from rhodecode.lib.utils import make_ui
 
        return make_ui('db', clear_session=False)
 

	
 
    @classmethod
 
    def inject_ui(cls, repo, extras={}):
 
        from rhodecode.lib.vcs.backends.hg import MercurialRepository
 
        from rhodecode.lib.vcs.backends.git import GitRepository
 
        required = (MercurialRepository, GitRepository)
 
        if not isinstance(repo, required):
 
            raise Exception('repo must be instance of %s' % required)
 

	
 
        # inject ui extra param to log this action via push logger
 
        for k, v in extras.items():
 
            repo._repo.ui.setconfig('rhodecode_extras', k, v)
 

	
 
    @classmethod
 
    def is_valid(cls, repo_name):
 
        """
 
        returns True if the given repo name is a valid filesystem repository
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        from rhodecode.lib.utils import is_valid_repo
 

	
 
        return is_valid_repo(repo_name, cls.base_path())
 

	
 
    def get_api_data(self):
 
        """
 
        Common function for generating repo api data
 

	
 
        """
 
        repo = self
 
        data = dict(
 
            repo_id=repo.repo_id,
 
            repo_name=repo.repo_name,
 
            repo_type=repo.repo_type,
 
            clone_uri=repo.clone_uri,
 
            private=repo.private,
 
            created_on=repo.created_on,
 
            description=repo.description,
 
            landing_rev=repo.landing_rev,
 
            owner=repo.user.username,
 
            fork_of=repo.fork.repo_name if repo.fork else None,
 
            enable_statistics=repo.enable_statistics,
 
            enable_locking=repo.enable_locking,
 
            enable_downloads=repo.enable_downloads
 
        )
 

	
 
        return data
 

	
 
    @classmethod
 
    def lock(cls, repo, user_id):
 
        repo.locked = [user_id, time.time()]
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def unlock(cls, repo):
 
        repo.locked = None
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @property
 
    def last_db_change(self):
 
        return self.updated_on
 

	
 
    #==========================================================================
 
    # SCM PROPERTIES
 
    #==========================================================================
 

	
 
    def get_changeset(self, rev=None):
 
        return get_changeset_safe(self.scm_instance, rev)
 

	
 
    def get_landing_changeset(self):
 
        """
 
        Returns landing changeset, or if that doesn't exist returns the tip
 
        """
 
        cs = self.get_changeset(self.landing_rev) or self.get_changeset()
 
        return cs
 

	
 
    def update_last_change(self, last_change=None):
 
        if last_change is None:
 
            last_change = datetime.datetime.now()
 
        if self.updated_on is None or self.updated_on != last_change:
 
            log.debug('updated repo %s with new date %s' % (self, last_change))
 
            self.updated_on = last_change
 
    def update_changeset_cache(self, cs_cache=None):
 
        """
 
        Update cache of last changeset for repository, keys should be::
 

	
 
            short_id
 
            raw_id
 
            revision
 
            message
 
            date
 
            author
 

	
 
        :param cs_cache:
 
        """
 
        from rhodecode.lib.vcs.backends.base import BaseChangeset
 
        if cs_cache is None:
 
            cs_cache = self.get_changeset()
 
        if isinstance(cs_cache, BaseChangeset):
 
            cs_cache = cs_cache.__json__()
 

	
 
        if cs_cache != self.changeset_cache:
 
            last_change = cs_cache.get('date') or self.last_change
 
            log.debug('updated repo %s with new cs cache %s' % (self, cs_cache))
 
            self.updated_on = last_change
 
            self.changeset_cache = cs_cache
 
            Session().add(self)
 
            Session().commit()
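    # A minimal sketch of how update_changeset_cache above is typically
    # driven (the exact call site is an assumption): after a push the newest
    # changeset is read from the scm backend and persisted, so the dashboard
    # can serve it straight from the database.
    #
    #   >>> repo = Repository.get_by_repo_name('some-repo')   # hypothetical repo
    #   >>> repo.update_changeset_cache()        # falls back to get_changeset()
    #   >>> repo.changeset_cache['author']       # now served from the DB column
    #   'marcink <marcin@python-works.com>'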
 

	
 
    @property
 
    def tip(self):
 
        return self.get_changeset('tip')
 

	
 
    @property
 
    def author(self):
 
        return self.tip.author
 

	
 
    @property
 
    def last_change(self):
 
        return self.scm_instance.last_change
 

	
 
    def get_comments(self, revisions=None):
 
        """
 
        Returns comments for this repository grouped by revisions
 

	
 
        :param revisions: filter query by revisions only
 
        """
 
        cmts = ChangesetComment.query()\
 
            .filter(ChangesetComment.repo == self)
 
        if revisions:
 
            cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
 
        grouped = defaultdict(list)
 
        for cmt in cmts.all():
 
            grouped[cmt.revision].append(cmt)
 
        return grouped
 

	
 
    def statuses(self, revisions=None):
 
        """
 
        Returns statuses for this repository
 

	
 
        :param revisions: list of revisions to get statuses for
 
        :type revisions: list
 
        """
 

	
 
        statuses = ChangesetStatus.query()\
 
            .filter(ChangesetStatus.repo == self)\
 
            .filter(ChangesetStatus.version == 0)
 
        if revisions:
 
            statuses = statuses.filter(ChangesetStatus.revision.in_(revisions))
 
        grouped = {}
 

	
 
        # maybe we have an open pull request without a status yet?
 
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
 
        status_lbl = ChangesetStatus.get_status_lbl(stat)
 
        for pr in PullRequest.query().filter(PullRequest.org_repo == self).all():
 
            for rev in pr.revisions:
 
                pr_id = pr.pull_request_id
 
                pr_repo = pr.other_repo.repo_name
 
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
 

	
 
        for stat in statuses.all():
 
            pr_id = pr_repo = None
 
            if stat.pull_request:
 
                pr_id = stat.pull_request.pull_request_id
 
                pr_repo = stat.pull_request.other_repo.repo_name
 
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
 
                                      pr_id, pr_repo]
 
        return grouped
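    # The mapping returned by statuses() above looks roughly like this
    # (hypothetical revisions and ids):
    #
    #   {'abc123...': ['approved', 'Approved', None, None],
    #    'def456...': ['under_review', 'Under Review', 7, 'group/other-repo']}
    #
    # i.e. revision -> [status, status label, pull request id, other repo name].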
 

	
 
    #==========================================================================
 
    # SCM CACHE INSTANCE
 
    #==========================================================================
 

	
 
    @property
 
    def invalidate(self):
 
        return CacheInvalidation.invalidate(self.repo_name)
 

	
 
    def set_invalidate(self):
 
        """
 
        set a cache for invalidation for this instance
 
        """
 
        CacheInvalidation.set_invalidate(repo_name=self.repo_name)
 

	
 
    @LazyProperty
 
    def scm_instance(self):
 
        import rhodecode
 
        full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
 
        if full_cache:
 
            return self.scm_instance_cached()
 
        return self.__get_instance()
 

	
 
    def scm_instance_cached(self, cache_map=None):
 
        @cache_region('long_term')
 
        def _c(repo_name):
 
            # repo_name is only used as part of the beaker cache key here
            return self.__get_instance()
 
        rn = self.repo_name
 
        log.debug('Getting cached instance of repo')
 

	
 
        if cache_map:
 
            # get using prefilled cache_map
 
            invalidate_repo = cache_map[self.repo_name]
 
            if invalidate_repo:
 
                invalidate_repo = (None if invalidate_repo.cache_active
 
                                   else invalidate_repo)
 
        else:
 
            # get from invalidate
 
            invalidate_repo = self.invalidate
 

	
 
        if invalidate_repo is not None:
 
            region_invalidate(_c, None, rn)
 
            # update our cache
 
            CacheInvalidation.set_valid(invalidate_repo.cache_key)
 
        return _c(rn)
 

	
 
    def __get_instance(self):
 
        repo_full_path = self.repo_full_path
 
        try:
 
            alias = get_scm(repo_full_path)[0]
 
            log.debug('Creating instance of %s repository' % alias)
 
            backend = get_backend(alias)
 
        except VCSError:
 
            log.error(traceback.format_exc())
 
            log.error('Perhaps this repository exists in the database but '
 
                      'not on the filesystem; run "rescan repositories" with '
 
                      'the "destroy old data" option from the admin panel')
 
            return
 

	
 
        if alias == 'hg':
 

	
 
            repo = backend(safe_str(repo_full_path), create=False,
 
                           baseui=self._ui)
 
            # skip hidden web repository
 
            if repo._get_hidden():
 
                return
 
        else:
 
            repo = backend(repo_full_path, create=False)
 

	
 
        return repo
 

	
 

	
 
class RepoGroup(Base, BaseModel):
 
    __tablename__ = 'groups'
 
    __table_args__ = (
 
        UniqueConstraint('group_name', 'group_parent_id'),
 
        CheckConstraint('group_id != group_parent_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    __mapper_args__ = {'order_by': 'group_name'}
 

	
 
    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    group_name = Column("group_name", String(255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
 
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
 
    group_description = Column("group_description", String(10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
 

	
 
    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
 
    users_group_to_perm = relationship('UsersGroupRepoGroupToPerm', cascade='all')
 

	
 
    parent_group = relationship('RepoGroup', remote_side=group_id)
 

	
 
    def __init__(self, group_name='', parent_group=None):
 
        self.group_name = group_name
 
        self.parent_group = parent_group
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
 
                                  self.group_name)
 

	
 
    @classmethod
 
    def groups_choices(cls, check_perms=False):
 
        from webhelpers.html import literal as _literal
 
        from rhodecode.model.scm import ScmModel
 
        groups = cls.query().all()
 
        if check_perms:
 
            # filter to the groups the user has access to; this is done
 
            # inside ScmModel based on the current user
 
            groups = ScmModel().get_repos_groups(groups)
 
        repo_groups = [('', '')]
 
        sep = ' &raquo; '
 
        _name = lambda k: _literal(sep.join(k))
 

	
 
        repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
 
                              for x in groups])
 

	
 
        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
 
        return repo_groups
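    # groups_choices() above builds select-box options; a sketch of the
    # output shape (hypothetical groups, literals shown as plain unicode):
    #
    #   [('', ''),
    #    (1, u'docs'),
    #    (3, u'docs &raquo; manuals'),
    #    (2, u'tools')]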
 

	
 
    @classmethod
 
    def url_sep(cls):
 
        return URL_SEP
 

	
 
    @classmethod
 
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
 
        if case_insensitive:
 
            gr = cls.query()\
 
                .filter(cls.group_name.ilike(group_name))
 
        else:
 
            gr = cls.query()\
 
                .filter(cls.group_name == group_name)
 
        if cache:
 
            gr = gr.options(FromCache(
 
                            "sql_cache_short",
 
                            "get_group_%s" % _hash_key(group_name)
 
                            )
 
            )
 
        return gr.scalar()
 

	
 
    @property
 
    def parents(self):
 
        parents_recursion_limit = 5
 
        groups = []
 
        if self.parent_group is None:
 
            return groups
 
        cur_gr = self.parent_group
 
        groups.insert(0, cur_gr)
 
        cnt = 0
 
        while 1:
 
            cnt += 1
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            if cnt == parents_recursion_limit:
 
                # this will prevent accidental infinite loops
 
                log.error('group nested deeper than %s levels' %
 
                          parents_recursion_limit)
 
                break
 

	
 
            groups.insert(0, gr)
 
        return groups
 

	
 
    @property
 
    def children(self):
 
        return RepoGroup.query().filter(RepoGroup.parent_group == self)
 

	
 
    @property
 
    def name(self):
 
        return self.group_name.split(RepoGroup.url_sep())[-1]
 

	
 
    @property
 
    def full_path(self):
 
        return self.group_name
 

	
 
    @property
 
    def full_path_splitted(self):
 
        return self.group_name.split(RepoGroup.url_sep())
 

	
 
    @property
 
    def repositories(self):
 
        return Repository.query()\
 
                .filter(Repository.group == self)\
 
                .order_by(Repository.repo_name)
 

	
 
    @property
 
    def repositories_recursive_count(self):
 
        cnt = self.repositories.count()
 

	
 
        def children_count(group):
 
            cnt = 0
 
            for child in group.children:
 
                cnt += child.repositories.count()
 
                cnt += children_count(child)
 
            return cnt
 

	
 
        return cnt + children_count(self)
 

	
 
    def recursive_groups_and_repos(self):
 
        """
 
        Recursively return all groups, with the repositories in those groups
 
        """
 
        all_ = []
 

	
 
        def _get_members(root_gr):
 
            for r in root_gr.repositories:
 
                all_.append(r)
 
            childs = root_gr.children.all()
 
            if childs:
 
                for gr in childs:
 
                    all_.append(gr)
 
                    _get_members(gr)
 

	
 
        _get_members(self)
 
        return [self] + all_
 

	
 
    def get_new_name(self, group_name):
 
        """
 
        returns new full group name based on parent and new name
 

	
 
        :param group_name:
 
        """
 
        path_prefix = (self.parent_group.full_path_splitted if
 
                       self.parent_group else [])
 
        return RepoGroup.url_sep().join(path_prefix + [group_name])
 

	
 

	
 
class Permission(Base, BaseModel):
 
    __tablename__ = 'permissions'
 
    __table_args__ = (
 
        Index('p_perm_name_idx', 'permission_name'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    PERMS = [
 
        ('repository.none', _('Repository no access')),
 
        ('repository.read', _('Repository read access')),
 
        ('repository.write', _('Repository write access')),
 
        ('repository.admin', _('Repository admin access')),
 

	
 
        ('group.none', _('Repositories Group no access')),
 
        ('group.read', _('Repositories Group read access')),
 
        ('group.write', _('Repositories Group write access')),
 
        ('group.admin', _('Repositories Group admin access')),
 

	
 
        ('hg.admin', _('RhodeCode Administrator')),
 
        ('hg.create.none', _('Repository creation disabled')),
 
        ('hg.create.repository', _('Repository creation enabled')),
 
        ('hg.fork.none', _('Repository forking disabled')),
 
        ('hg.fork.repository', _('Repository forking enabled')),
 
        ('hg.register.none', _('Register disabled')),
 
        ('hg.register.manual_activate', _('Register new user with RhodeCode '
 
                                          'with manual activation')),
 

	
 
        ('hg.register.auto_activate', _('Register new user with RhodeCode '
 
                                        'with auto activation')),
 
    ]
 

	
 
    # defines which permissions are more important; the higher the weight, the more important the permission
 
    PERM_WEIGHTS = {
 
        'repository.none': 0,
 
        'repository.read': 1,
 
        'repository.write': 3,
 
        'repository.admin': 4,
 

	
 
        'group.none': 0,
 
        'group.read': 1,
 
        'group.write': 3,
 
        'group.admin': 4,
 

	
 
        'hg.fork.none': 0,
 
        'hg.fork.repository': 1,
 
        'hg.create.none': 0,
 
        'hg.create.repository': 1
 
    }
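    # Sketch of how the weights above are meant to be used (the resolving
    # code lives elsewhere): when a user ends up with several permissions on
    # the same object, the one with the highest weight wins.
    #
    #   >>> max(['repository.read', 'repository.write'],
    #   ...     key=Permission.PERM_WEIGHTS.get)
    #   'repository.write'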
 

	
 
    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    permission_name = Column("permission_name", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    permission_longname = Column("permission_longname", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (
 
            self.__class__.__name__, self.permission_id, self.permission_name
 
        )
 

	
 
    @classmethod
 
    def get_by_key(cls, key):
 
        return cls.query().filter(cls.permission_name == key).scalar()
 

	
 
    @classmethod
 
    def get_default_perms(cls, default_user_id):
 
        q = Session().query(UserRepoToPerm, Repository, cls)\
 
         .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
 
         .join((cls, UserRepoToPerm.permission_id == cls.permission_id))\
 
         .filter(UserRepoToPerm.user_id == default_user_id)
 

	
 
        return q.all()
 

	
 
    @classmethod
 
    def get_default_group_perms(cls, default_user_id):
 
        q = Session().query(UserRepoGroupToPerm, RepoGroup, cls)\
 
         .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
 
         .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id))\
 
         .filter(UserRepoGroupToPerm.user_id == default_user_id)
 

	
 
        return q.all()
 

	
 

	
 
class UserRepoToPerm(Base, BaseModel):
 
    __tablename__ = 'repo_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User')
 
    repository = relationship('Repository')
 
    permission = relationship('Permission')
 

	
 
    @classmethod
 
    def create(cls, user, repository, permission):
 
        n = cls()
 
        n.user = user
 
        n.repository = repository
 
        n.permission = permission
 
        Session().add(n)
 
        return n
 

	
 
    def __unicode__(self):
 
        return u'<user:%s => %s >' % (self.user, self.repository)
 

	
 

	
 
class UserToPerm(Base, BaseModel):
 
    __tablename__ = 'user_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'permission_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User')
 
    permission = relationship('Permission', lazy='joined')
 

	
 

	
 
class UsersGroupRepoToPerm(Base, BaseModel):
 
    __tablename__ = 'users_group_repo_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
 

	
 
    users_group = relationship('UsersGroup')
 
    permission = relationship('Permission')
 
    repository = relationship('Repository')
 

	
 
    @classmethod
 
    def create(cls, users_group, repository, permission):
 
        n = cls()
 
        n.users_group = users_group
 
        n.repository = repository
 
        n.permission = permission
 
        Session().add(n)
 
        return n
 

	
 
    def __unicode__(self):
 
        return u'<userGroup:%s => %s >' % (self.users_group, self.repository)
 

	
 

	
 
class UsersGroupToPerm(Base, BaseModel):
 
    __tablename__ = 'users_group_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('users_group_id', 'permission_id',),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 

	
 
    users_group = relationship('UsersGroup')
 
    permission = relationship('Permission')
 

	
 

	
 
class UserRepoGroupToPerm(Base, BaseModel):
 
    __tablename__ = 'user_repo_group_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 

	
 
    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User')
 
    group = relationship('RepoGroup')
 
    permission = relationship('Permission')
 

	
 

	
 
class UsersGroupRepoGroupToPerm(Base, BaseModel):
 
    __tablename__ = 'users_group_repo_group_to_perm'
 
    __table_args__ = (
 
        UniqueConstraint('users_group_id', 'group_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 

	
 
    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 

	
 
    users_group = relationship('UsersGroup')
 
    permission = relationship('Permission')
 
    group = relationship('RepoGroup')
 

	
 

	
 
class Statistics(Base, BaseModel):
 
    __tablename__ = 'statistics'
 
    __table_args__ = (
 
         UniqueConstraint('repository_id'),
 
         {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
          'mysql_charset': 'utf8'}
 
    )
 
    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
 
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
 
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
 
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
 
    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
 

	
 
    repository = relationship('Repository', single_parent=True)
 

	
 

	
 
class UserFollowing(Base, BaseModel):
 
    __tablename__ = 'user_followings'
 
    __table_args__ = (
 
        UniqueConstraint('user_id', 'follows_repository_id'),
 
        UniqueConstraint('user_id', 'follows_user_id'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 

	
 
    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
 
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
 

	
 
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
 

	
 
    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
 
    follows_repository = relationship('Repository', order_by='Repository.repo_name')
 

	
 
    @classmethod
 
    def get_repo_followers(cls, repo_id):
 
        return cls.query().filter(cls.follows_repo_id == repo_id)
 

	
 

	
 
class CacheInvalidation(Base, BaseModel):
 
    __tablename__ = 'cache_invalidation'
 
    __table_args__ = (
 
        UniqueConstraint('cache_key'),
 
        Index('key_idx', 'cache_key'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    cache_key = Column("cache_key", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    cache_args = Column("cache_args", String(255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
 

	
 
    def __init__(self, cache_key, cache_args=''):
 
        self.cache_key = cache_key
 
        self.cache_args = cache_args
 
        self.cache_active = False
 

	
 
    def __unicode__(self):
 
        return u"<%s('%s:%s')>" % (self.__class__.__name__,
 
                                  self.cache_id, self.cache_key)
 

	
 
    @property
 
    def prefix(self):
 
        _split = self.cache_key.split(self.cache_args, 1)
 
        if _split and len(_split) == 2:
 
            return _split[0]
 
        return ''
 

	
 
    @classmethod
 
    def clear_cache(cls):
 
        cls.query().delete()
 

	
 
    @classmethod
 
    def _get_key(cls, key):
 
        """
 
        Wrapper for generating a key, together with a prefix
 

	
 
        :param key:
 
        """
 
        import rhodecode
 
        prefix = ''
 
        org_key = key
 
        iid = rhodecode.CONFIG.get('instance_id')
 
        if iid:
 
            prefix = iid
 

	
 
        return "%s%s" % (prefix, key), prefix, org_key
 

	
 
    @classmethod
 
    def get_by_key(cls, key):
 
        return cls.query().filter(cls.cache_key == key).scalar()
 

	
 
    @classmethod
 
    def get_by_repo_name(cls, repo_name):
 
        return cls.query().filter(cls.cache_args == repo_name).all()
 

	
 
    @classmethod
 
    def _get_or_create_key(cls, key, repo_name, commit=True):
 
        inv_obj = Session().query(cls).filter(cls.cache_key == key).scalar()
 
        if not inv_obj:
 
            try:
 
                inv_obj = CacheInvalidation(key, repo_name)
 
                Session().add(inv_obj)
 
                if commit:
 
                    Session().commit()
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                Session().rollback()
 
        return inv_obj
 

	
 
    @classmethod
 
    def invalidate(cls, key):
 
        """
 
        Returns an Invalidation object if the given key should be invalidated,
 
        None otherwise. `cache_active = False` means that this cache
 
        state is not valid and needs to be invalidated
 

	
 
        :param key:
 
        """
 
        repo_name = key
 
        repo_name = remove_suffix(repo_name, '_README')
 
        repo_name = remove_suffix(repo_name, '_RSS')
 
        repo_name = remove_suffix(repo_name, '_ATOM')
 

	
 
        # adds instance prefix
 
        key, _prefix, _org_key = cls._get_key(key)
 
        inv = cls._get_or_create_key(key, repo_name)
 

	
 
        if inv and inv.cache_active is False:
 
            return inv
 

	
 
    @classmethod
 
    def set_invalidate(cls, key=None, repo_name=None):
 
        """
 
        Mark this cache key for invalidation, either by key or for whole
 
        cache sets based on repo_name
 

	
 
        :param key:
 
        """
 
        if key:
 
            key, _prefix, _org_key = cls._get_key(key)
 
            inv_objs = Session().query(cls).filter(cls.cache_key == key).all()
 
        elif repo_name:
 
            inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
 
        else:
            raise Exception('You need to specify a key or repo_name')
 

	
 
        log.debug('marking %s key[s] for invalidation based on key=%s, repo_name=%s'
 
                  % (len(inv_objs), key, repo_name))
 
        try:
 
            for inv_obj in inv_objs:
 
                inv_obj.cache_active = False
 
                Session().add(inv_obj)
 
            Session().commit()
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            Session().rollback()
 

	
 
    @classmethod
 
    def set_valid(cls, key):
 
        """
 
        Mark this cache key as active and currently cached
 

	
 
        :param key:
 
        """
 
        inv_obj = cls.get_by_key(key)
 
        inv_obj.cache_active = True
 
        Session().add(inv_obj)
 
        Session().commit()
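    # Rough lifecycle sketch for invalidate()/set_invalidate()/set_valid()
    # above (the callers named here are assumptions): a push calls
    # set_invalidate() to flip cache_active to False for all keys of a repo;
    # the next reader calls invalidate(), gets the stale record back,
    # rebuilds its beaker cache and finally calls set_valid() to mark the
    # key as cached again.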
 

	
 
    @classmethod
 
    def get_cache_map(cls):
 

	
 
        class cachemapdict(dict):
 

	
 
            def __init__(self, *args, **kwargs):
 
                fixkey = kwargs.get('fixkey')
 
                if fixkey:
 
                    del kwargs['fixkey']
 
                self.fixkey = fixkey
 
                super(cachemapdict, self).__init__(*args, **kwargs)
 

	
 
            def __getattr__(self, name):
 
                key = name
 
                if self.fixkey:
 
                    key, _prefix, _org_key = cls._get_key(key)
 
                if key in self.__dict__:
 
                    return self.__dict__[key]
 
                else:
 
                    return self[key]
 

	
 
            def __getitem__(self, key):
 
                if self.fixkey:
 
                    key, _prefix, _org_key = cls._get_key(key)
 
                try:
 
                    return super(cachemapdict, self).__getitem__(key)
 
                except KeyError:
 
                    return
 

	
 
        cache_map = cachemapdict(fixkey=True)
 
        for obj in cls.query().all():
 
            cache_map[obj.cache_key] = cachemapdict(obj.get_dict())
 
        return cache_map
 

	
 

	
 
class ChangesetComment(Base, BaseModel):
 
    __tablename__ = 'changeset_comments'
 
    __table_args__ = (
 
        Index('cc_revision_idx', 'revision'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'},
 
    )
 
    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
 
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    revision = Column('revision', String(40), nullable=True)
 
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
 
    line_no = Column('line_no', Unicode(10), nullable=True)
 
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
 
    f_path = Column('f_path', Unicode(1000), nullable=True)
 
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
 
    text = Column('text', UnicodeText(25000), nullable=False)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    author = relationship('User', lazy='joined')
 
    repo = relationship('Repository')
 
    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
 
    pull_request = relationship('PullRequest', lazy='joined')
 

	
 
    @classmethod
 
    def get_users(cls, revision=None, pull_request_id=None):
 
        """
 
        Returns the users associated with this ChangesetComment, i.e. those
 
        who actually commented
 

	
 
        :param cls:
 
        :param revision:
 
        """
 
        q = Session().query(User)\
 
                .join(ChangesetComment.author)
 
        if revision:
 
            q = q.filter(cls.revision == revision)
 
        elif pull_request_id:
 
            q = q.filter(cls.pull_request_id == pull_request_id)
 
        return q.all()
 

	
 

	
 
class ChangesetStatus(Base, BaseModel):
 
    __tablename__ = 'changeset_statuses'
 
    __table_args__ = (
 
        Index('cs_revision_idx', 'revision'),
 
        Index('cs_version_idx', 'version'),
 
        UniqueConstraint('repo_id', 'revision', 'version'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8'}
 
    )
 
    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
 
    STATUS_APPROVED = 'approved'
 
    STATUS_REJECTED = 'rejected'
 
    STATUS_UNDER_REVIEW = 'under_review'
 

	
 
    STATUSES = [
 
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
 
        (STATUS_APPROVED, _("Approved")),
 
        (STATUS_REJECTED, _("Rejected")),
 
        (STATUS_UNDER_REVIEW, _("Under Review")),
 
    ]
 

	
 
    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
 
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
 
    revision = Column('revision', String(40), nullable=False)
rhodecode/templates/index_base.html
Show inline comments
 
<%page args="parent" />
 
    <div class="box">
 
        <!-- box / title -->
 
        <div class="title">
 
            <h5>
 
            <input class="q_filter_box" id="q_filter" size="15" type="text" name="filter" value="${_('quick filter...')}"/> ${parent.breadcrumbs()} <span id="repo_count">0</span> ${_('repositories')}
 
            </h5>
 
            %if c.rhodecode_user.username != 'default':
 
                %if h.HasPermissionAny('hg.admin','hg.create.repository')():
 
                <ul class="links">
 
                  <li>
 
                  %if c.group:
 
                    <span>${h.link_to(_('ADD REPOSITORY'),h.url('admin_settings_create_repository',parent_group=c.group.group_id))}</span>
 
                  %else:
 
                    <span>${h.link_to(_('ADD REPOSITORY'),h.url('admin_settings_create_repository'))}</span>
 
                  %endif
 
                  </li>
 
                </ul>
 
                %endif
 
            %endif
 
        </div>
 
        <!-- end box / title -->
 
        <div class="table">
 
           % if c.groups:
 
            <div id='groups_list_wrap' class="yui-skin-sam">
 
              <table id="groups_list">
 
                  <thead>
 
                      <tr>
 
                          <th class="left"><a href="#">${_('Group name')}</a></th>
 
                          <th class="left"><a href="#">${_('Description')}</a></th>
 
                          ##<th class="left"><a href="#">${_('Number of repositories')}</a></th>
 
                      </tr>
 
                  </thead>
 

	
 
                  ## REPO GROUPS
 
                  % for gr in c.groups:
 
                    <tr>
 
                        <td>
 
                            <div style="white-space: nowrap">
 
                            <img class="icon" alt="${_('Repositories group')}" src="${h.url('/images/icons/database_link.png')}"/>
 
                            ${h.link_to(gr.name,url('repos_group_home',group_name=gr.group_name))}
 
                            </div>
 
                        </td>
 
                        %if c.visual.stylify_metatags:
 
                            <td>${h.urlify_text(h.desc_stylize(gr.group_description))}</td>
 
                        %else:
 
                            <td>${gr.group_description}</td>
 
                        %endif
 
                        ## this is commented out since for multi-nested repos it can be HEAVY
 
                        ## in the number of executed queries during traversal; uncomment at will
 
                        ##<td><b>${gr.repositories_recursive_count}</b></td>
 
                    </tr>
 
                  % endfor
 

	
 
              </table>
 
            </div>
 
            <div style="height: 20px"></div>
 
            % endif
 
            <div id="welcome" style="display:none;text-align:center">
 
                <h1><a href="${h.url('home')}">${c.rhodecode_name} ${c.rhodecode_version}</a></h1>
 
            </div>
 
             <%cnt=0%>
 
             <%namespace name="dt" file="/data_table/_dt_elements.html"/>
 
            % if c.visual.lightweight_dashboard is False:
 
              ## old full detailed version
 
            <div id='repos_list_wrap' class="yui-skin-sam">
 
            <table id="repos_list">
 
            <thead>
 
                <tr>
 
                    <th class="left"></th>
 
                    <th class="left">${_('Name')}</th>
 
                    <th class="left">${_('Description')}</th>
 
                    <th class="left">${_('Last change')}</th>
 
                    <th class="left">${_('Tip')}</th>
 
                    <th class="left">${_('Owner')}</th>
 
                    <th class="left">${_('RSS')}</th>
 
                    <th class="left">${_('Atom')}</th>
 
                </tr>
 
            </thead>
 
            <tbody>
 
            %for cnt,repo in enumerate(c.repos_list):
 
                <tr class="parity${(cnt+1)%2}">
 
                    ##QUICK MENU
 
                    <td class="quick_repo_menu">
 
                      ${dt.quick_menu(repo['name'])}
 
                    </td>
 
                    ##REPO NAME AND ICONS
 
                    <td class="reponame">
 
                      ${dt.repo_name(repo['name'],repo['dbrepo']['repo_type'],repo['dbrepo']['private'],h.AttributeDict(repo['dbrepo_fork']),pageargs.get('short_repo_names'))}
 
                    </td>
 
                    ##DESCRIPTION
 
                    <td><span class="tooltip" title="${h.tooltip(repo['description'])}">
 
                       %if c.visual.stylify_metatags:
 
                       ${h.urlify_text(h.desc_stylize(h.truncate(repo['description'],60)))}</span>
 
                       %else:
 
                       ${h.truncate(repo['description'],60)}</span>
 
                       %endif
 
                    </td>
 
                    ##LAST CHANGE DATE
 
                    <td>
 
                      ${dt.last_change(repo['last_change'])}
 
                    </td>
 
                    ##LAST REVISION
 
                    <td>
 
                        ${dt.revision(repo['name'],repo['rev'],repo['tip'],repo['author'],repo['last_msg'])}
 
                    </td>
 
                    ##
 
                    <td title="${repo['contact']}">${h.person(repo['contact'])}</td>
 
                    <td>
 
                      ${dt.rss(repo['name'])}
 
                    </td>
 
                    <td>
 
                      ${dt.atom(repo['name'])}
 
                    </td>
 
                </tr>
 
            %endfor
 
            </tbody>
 
            </table>
 
            </div>
 
            % else:
 
              ## lightweight version
 
                <div class="yui-skin-sam" id="repos_list_wrap"></div>
 
                <div id="user-paginator" style="padding: 0px 0px 0px 0px"></div>
 
            % endif
 
        </div>
 
    </div>
 
    % if c.visual.lightweight_dashboard is False:
 
    <script>
 
      YUD.get('repo_count').innerHTML = ${cnt+1 if cnt else 0};
 
      var func = function(node){
 
          return node.parentNode.parentNode.parentNode.parentNode;
 
      }
 

	
 
      // groups table sorting
 
      var myColumnDefs = [
 
          {key:"name",label:"${_('Group name')}",sortable:true,
 
              sortOptions: { sortFunction: groupNameSort }},
 
          {key:"desc",label:"${_('Description')}",sortable:true},
 
      ];
 

	
 
      var myDataSource = new YAHOO.util.DataSource(YUD.get("groups_list"));
 

	
 
      myDataSource.responseType = YAHOO.util.DataSource.TYPE_HTMLTABLE;
 
      myDataSource.responseSchema = {
 
          fields: [
 
              {key:"name"},
 
              {key:"desc"},
 
          ]
 
      };
 

	
 
      var myDataTable = new YAHOO.widget.DataTable("groups_list_wrap", myColumnDefs, myDataSource,{
 
          sortedBy:{key:"name",dir:"asc"},
 
          paginator: new YAHOO.widget.Paginator({
 
              rowsPerPage: 5,
 
              alwaysVisible: false,
 
              template : "{PreviousPageLink} {FirstPageLink} {PageLinks} {LastPageLink} {NextPageLink}",
 
              pageLinks: 5,
 
              containerClass: 'pagination-wh',
 
              currentPageClass: 'pager_curpage',
 
              pageLinkClass: 'pager_link',
 
              nextPageLinkLabel: '&gt;',
 
              previousPageLinkLabel: '&lt;',
 
              firstPageLinkLabel: '&lt;&lt;',
 
              lastPageLinkLabel: '&gt;&gt;',
 
              containers:['user-paginator']
 
          }),
 
          MSG_SORTASC:"${_('Click to sort ascending')}",
 
          MSG_SORTDESC:"${_('Click to sort descending')}"
 
      });
 

	
 
      // main table sorting
 
      var myColumnDefs = [
 
          {key:"menu",label:"",sortable:false,className:"quick_repo_menu hidden"},
 
          {key:"name",label:"${_('Name')}",sortable:true,
 
              sortOptions: { sortFunction: nameSort }},
 
          {key:"desc",label:"${_('Description')}",sortable:true},
 
          {key:"last_change",label:"${_('Last Change')}",sortable:true,
 
              sortOptions: { sortFunction: ageSort }},
 
          {key:"tip",label:"${_('Tip')}",sortable:true,
 
              sortOptions: { sortFunction: revisionSort }},
 
          {key:"owner",label:"${_('Owner')}",sortable:true},
 
          {key:"rss",label:"",sortable:false},
 
          {key:"atom",label:"",sortable:false},
 
      ];
 

	
 
      var myDataSource = new YAHOO.util.DataSource(YUD.get("repos_list"));
 

	
 
      myDataSource.responseType = YAHOO.util.DataSource.TYPE_HTMLTABLE;
 

	
 
      myDataSource.responseSchema = {
 
          fields: [
 
              {key:"menu"},
 
              //{key:"raw_name"},
 
              {key:"name"},
 
              {key:"desc"},
 
              {key:"last_change"},
 
              {key:"tip"},
 
              {key:"owner"},
 
              {key:"rss"},
 
              {key:"atom"},
 
          ]
 
      };
 

	
 
      var myDataTable = new YAHOO.widget.DataTable("repos_list_wrap", myColumnDefs, myDataSource,
 
              {
 
               sortedBy:{key:"name",dir:"asc"},
 
               MSG_SORTASC:"${_('Click to sort ascending')}",
 
               MSG_SORTDESC:"${_('Click to sort descending')}",
 
               MSG_EMPTY:"${_('No records found.')}",
 
               MSG_ERROR:"${_('Data error.')}",
 
               MSG_LOADING:"${_('Loading...')}",
 
              }
 
      );
 
      myDataTable.subscribe('postRenderEvent',function(oArgs) {
 
          tooltip_activate();
 
          quick_repo_menu();
 
          q_filter('q_filter',YUQ('div.table tr td a.repo_name'),func);
 
      });
 

	
 
    </script>
 
    % else:
 
      <script>
 
        //var url = "${h.url('formatted_users', format='json')}";
 
        var data = ${c.data|n};
 
        var myDataSource = new YAHOO.util.DataSource(data);
 
        myDataSource.responseType = YAHOO.util.DataSource.TYPE_JSON;
 

	
 
        myDataSource.responseSchema = {
 
            resultsList: "records",
 
            fields: [
 
               {key:"menu"},
 
               {key:"raw_name"},
 
               {key:"name"},
 
               {key:"desc"},
 
               {key:"last_change"},
 
               {key: "tip"},
 
               {key:"owner"},
 
               {key:"rss"},
 
               {key:"atom"},
 
            ]
 
         };
 
        myDataSource.doBeforeCallback = function(req,raw,res,cb) {
 
            // This is the filter function
 
            var data     = res.results || [],
 
                filtered = [],
 
                i,l;
 

	
 
            if (req) {
 
                req = req.toLowerCase();
 
                for (i = 0; i<data.length; i++) {
 
                    var pos = data[i].raw_name.toLowerCase().indexOf(req)
 
                    if (pos != -1) {
 
                        filtered.push(data[i]);
 
                    }
 
                }
 
                res.results = filtered;
 
            }
 
            YUD.get('repo_count').innerHTML = res.results.length;
 
            return res;
 
        }
 

	
 
        // main table sorting
 
        var myColumnDefs = [
 
            {key:"menu",label:"",sortable:false,className:"quick_repo_menu hidden"},
 
            {key:"name",label:"${_('Name')}",sortable:true,
 
                sortOptions: { sortFunction: nameSort }},
 
            {key:"desc",label:"${_('Description')}",sortable:true},
 
            {key:"last_change",label:"${_('Last Change')}",sortable:true,
 
                sortOptions: { sortFunction: ageSort }},
 
            {key:"tip",label:"${_('Tip')}",sortable:true,
 
                sortOptions: { sortFunction: revisionSort }},
 
            {key:"owner",label:"${_('Owner')}",sortable:true},
 
            {key:"rss",label:"",sortable:false},
 
            {key:"atom",label:"",sortable:false},
 
        ];
 

	
 
        var myDataTable = new YAHOO.widget.DataTable("repos_list_wrap", myColumnDefs, myDataSource,{
 
          sortedBy:{key:"name",dir:"asc"},
 
          paginator: new YAHOO.widget.Paginator({
 
              rowsPerPage: ${c.visual.lightweight_dashboard_items},
 
              alwaysVisible: false,
 
              template : "{PreviousPageLink} {FirstPageLink} {PageLinks} {LastPageLink} {NextPageLink}",
 
              pageLinks: 5,
 
              containerClass: 'pagination-wh',
 
              currentPageClass: 'pager_curpage',
 
              pageLinkClass: 'pager_link',
 
              nextPageLinkLabel: '&gt;',
 
              previousPageLinkLabel: '&lt;',
 
              firstPageLinkLabel: '&lt;&lt;',
 
              lastPageLinkLabel: '&gt;&gt;',
 
              containers:['user-paginator']
 
          }),
 

	
 
          MSG_SORTASC:"${_('Click to sort ascending')}",
 
          MSG_SORTDESC:"${_('Click to sort descending')}",
 
          MSG_EMPTY:"${_('No records found.')}",
 
          MSG_ERROR:"${_('Data error.')}",
 
          MSG_LOADING:"${_('Loading...')}",
 
        }
 
        );
 
        myDataTable.subscribe('postRenderEvent',function(oArgs) {
 
            tooltip_activate();
 
            quick_repo_menu();
 
        });
 

	
 
        var filterTimeout = null;
 

	
 
        var updateFilter = function () {
 
            // Reset timeout
 
            filterTimeout = null;
 

	
 
            // Reset sort
 
            var state = myDataTable.getState();
 
                state.sortedBy = {key:'name', dir:YAHOO.widget.DataTable.CLASS_ASC};
 

	
 
            // Get filtered data
 
            myDataSource.sendRequest(YUD.get('q_filter').value,{
 
                success : myDataTable.onDataReturnInitializeTable,
 
                failure : myDataTable.onDataReturnInitializeTable,
 
                scope   : myDataTable,
 
                argument: state
 
            });
 

	
 
        };
 
        YUE.on('q_filter','click',function(){
 
            YUD.get('q_filter').value = '';
 
         });
 

	
 
        YUE.on('q_filter','keyup',function (e) {
 
            clearTimeout(filterTimeout);
 
            filterTimeout = setTimeout(updateFilter,600);
 
        });
 
      </script>
 
    % endif
setup.cfg
Show inline comments
 
[egg_info]
 
tag_build =
 
tag_svn_revision = true
 

	
 
[easy_install]
 
find_links = http://www.pylonshq.com/download/
 

	
 
[nosetests]
 
verbose=True
 
verbosity=2
 
with-pylons=test.ini
 
detailed-errors=1
 
nologcapture=1
 
#pdb=1
 
#pdb-failures=1
 
pdb=1
 
pdb-failures=1
 

	
 
# Babel configuration
 
[compile_catalog]
 
domain = rhodecode
 
directory = rhodecode/i18n
 
statistics = true
 

	
 
[extract_messages]
 
add_comments = TRANSLATORS:
 
output_file = rhodecode/i18n/rhodecode.pot
 
width = 80
 

	
 
[init_catalog]
 
domain = rhodecode
 
input_file = rhodecode/i18n/rhodecode.pot
 
output_dir = rhodecode/i18n
 

	
 
[update_catalog]
 
domain = rhodecode
 
input_file = rhodecode/i18n/rhodecode.pot
 
output_dir = rhodecode/i18n
 
previous = true
 

	
 
[build_sphinx]
 
source-dir = docs/
 
build-dir  = docs/_build
 
all_files  = 1
 

	
 
[upload_sphinx]
 
upload-dir = docs/_build/html
 
\ No newline at end of file