Changeset - 8ecfed1d8f8b
rhodecode/config/conf.py
 
new file 100644
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.config.conf

    ~~~~~~~~~~~~~~~~~~~~~
 

	
 
    Various config settings for RhodeCode
 

	
 
    :created_on: Mar 7, 2012
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
from rhodecode import EXTENSIONS
 

	
 
from rhodecode.lib.utils2 import __get_lem
 

	
 

	
 
# the language map is also used by the whoosh indexer, which indexes

# the content of files with the extensions listed here
 
LANGUAGES_EXTENSIONS_MAP = __get_lem()
 

	
 
#==============================================================================
 
# WHOOSH INDEX EXTENSIONS
 
#==============================================================================
 
# EXTENSIONS WHOSE CONTENT WE WANT TO INDEX USING WHOOSH
 
INDEX_EXTENSIONS = LANGUAGES_EXTENSIONS_MAP.keys()
 

	
 
# list of readme files to search for in the file tree and display in summary

# the attached weights define the search order; lower is searched first
 
ALL_READMES = [
 
    ('readme', 0), ('README', 0), ('Readme', 0),
 
    ('doc/readme', 1), ('doc/README', 1), ('doc/Readme', 1),
 
    ('Docs/readme', 2), ('Docs/README', 2), ('Docs/Readme', 2),
 
    ('DOCS/readme', 2), ('DOCS/README', 2), ('DOCS/Readme', 2),
 
    ('docs/readme', 2), ('docs/README', 2), ('docs/Readme', 2),
 
]
 

	
 
# extensions together with weights used for the search; lower is searched first
 
RST_EXTS = [
 
    ('', 0), ('.rst', 1), ('.rest', 1),
 
    ('.RST', 2), ('.REST', 2),
 
    ('.txt', 3), ('.TXT', 3)
 
]
 

	
 
MARKDOWN_EXTS = [
 
    ('.md', 1), ('.MD', 1),
 
    ('.mkdn', 2), ('.MKDN', 2),
 
    ('.mdown', 3), ('.MDOWN', 3),
 
    ('.markdown', 4), ('.MARKDOWN', 4)
 
]
 

	
 
PLAIN_EXTS = [('.text', 2), ('.TEXT', 2)]
 

	
 
ALL_EXTS = MARKDOWN_EXTS + RST_EXTS + PLAIN_EXTS
 

	
 
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
 

	
 
DATE_FORMAT = "%Y-%m-%d"
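These weighted lists are combined later in this changeset (see README_FILES in rhodecode/controllers/summary.py) into one list of candidate readme paths ordered by the summed weights. A minimal sketch of that combination, using only the names defined above (summary.py pulls product from rhodecode.lib.compat; the stdlib itertools.product behaves the same here):

from itertools import product

# pair every readme name with every extension and order the pairs by the sum
# of their weights, so the zero-weight candidates ('readme', 'README',
# 'Readme') are probed before heavier ones such as 'docs/Readme.MARKDOWN'
README_FILES = [''.join([name[0], ext[0]]) for name, ext in
                sorted(product(ALL_READMES, ALL_EXTS),
                       key=lambda pair: pair[0][1] + pair[1][1])]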
rhodecode/config/environment.py
 
"""Pylons environment configuration"""
 

	
 
import os
 
import logging
 
import rhodecode
 

	
 
from mako.lookup import TemplateLookup
 
from pylons.configuration import PylonsConfig
 
from pylons.error import handle_mako_error
 

	
 
# don't remove this import, it does magic for celery
from rhodecode.lib import celerypylons

import rhodecode.lib.app_globals as app_globals

from rhodecode.config.routing import make_map

from rhodecode.lib import helpers
from rhodecode.lib.auth import set_available_permissions
from rhodecode.lib.utils import repo2db_mapper, make_ui, set_rhodecode_config,\
    load_rcextensions
from rhodecode.lib.utils2 import engine_from_config, str2bool
from rhodecode.model import init_model
from rhodecode.model.scm import ScmModel
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def load_environment(global_conf, app_conf, initial=False):
 
    """Configure the Pylons environment via the ``pylons.config``
 
    """
 
    Configure the Pylons environment via the ``pylons.config``
 
    object
 
    """
 
    config = PylonsConfig()
 

	
 
    # Pylons paths
 
    root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
 
    paths = dict(
        root=root,
        controllers=os.path.join(root, 'controllers'),
        static_files=os.path.join(root, 'public'),
        templates=[os.path.join(root, 'templates')]
    )
 

	
 
    # Initialize config with the basic options
 
    config.init_app(global_conf, app_conf, package='rhodecode', paths=paths)
 

	
 
    # store some globals into rhodecode
 
    rhodecode.CELERY_ON = str2bool(config['app_conf'].get('use_celery'))
 

	
 
    config['routes.map'] = make_map(config)
 
    config['pylons.app_globals'] = app_globals.Globals(config)
 
    config['pylons.h'] = helpers
 
    rhodecode.CONFIG = config
 

	
 
    load_rcextensions(root_path=config['here'])
 

	
 
    # Setup cache object as early as possible
 
    import pylons
 
    pylons.cache._push_object(config['pylons.app_globals'].cache)
 

	
 
    # Create the Mako TemplateLookup, with the default auto-escaping
 
    config['pylons.app_globals'].mako_lookup = TemplateLookup(
 
        directories=paths['templates'],
 
        error_handler=handle_mako_error,
 
        module_directory=os.path.join(app_conf['cache_dir'], 'templates'),
 
        input_encoding='utf-8', default_filters=['escape'],
 
        imports=['from webhelpers.html import escape'])
 

	
 
    # strict template context: accessing a non-existing attribute on ``c`` raises an error
 
    config['pylons.strict_tmpl_context'] = True
 
    test = os.path.split(config['__file__'])[-1] == 'test.ini'
 
    if test:
 
        from rhodecode.lib.utils import create_test_env, create_test_index
 
        from rhodecode.tests import  TESTS_TMP_PATH
 
        create_test_env(TESTS_TMP_PATH, config)
 
        create_test_index(TESTS_TMP_PATH, config, True)
 

	
 
    # MULTIPLE DB configs
 
    # Setup the SQLAlchemy database engine
 
    sa_engine_db1 = engine_from_config(config, 'sqlalchemy.db1.')
 

	
 
    init_model(sa_engine_db1)
 

	
 
    repos_path = make_ui('db').configitems('paths')[0][1]
 
    repo2db_mapper(ScmModel().repo_scan(repos_path))
 
    set_available_permissions(config)
 
    config['base_path'] = repos_path
 
    set_rhodecode_config(config)
 
    # CONFIGURATION OPTIONS HERE (note: all config options will override
 
    # any Pylons config options)
 

	
 
    # store config reference into our module to skip import magic of
 
    # pylons
 
    rhodecode.CONFIG.update(config)
 
    return config
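The engine_from_config(config, 'sqlalchemy.db1.') call above builds the SQLAlchemy engine from every .ini setting that starts with that prefix. A minimal sketch of the matching section of a RhodeCode ini file (the url key is the standard one; the remaining keys are illustrative):

[app:main]
# consumed by engine_from_config(config, 'sqlalchemy.db1.')
sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db
sqlalchemy.db1.echo = false
sqlalchemy.db1.pool_recycle = 3600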
rhodecode/config/rcextensions/make_rcextensions.py
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.config.rcextensions.make_rcextensions
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    Paster command that creates additional extensions (rcextensions) for RhodeCode
 

	
 
    :created_on: Mar 6, 2012
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
import os
 
import sys
 
import pkg_resources
 
import traceback
 
import logging
 
from os.path import dirname as dn, join as jn
 

	
 
#to get the rhodecode import
 
sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
 

	
 
from rhodecode.lib.utils import BasePasterCommand, Command, ask_ok
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class MakeRcExt(BasePasterCommand):
 

	
 
    max_args = 1
 
    min_args = 1
 

	
 
    usage = "CONFIG_FILE"
 
    summary = "Creates additional extensions for rhodecode"
 
    group_name = "RhodeCode"
 
    takes_config_file = -1
 
    parser = Command.standard_parser(verbose=True)
 

	
 
    def command(self):
 
        logging.config.fileConfig(self.path_to_ini_file)
 
        from pylons import config
 

	
 
        def _make_file(ext_file):
 
            bdir = os.path.split(ext_file)[0]
 
            if not os.path.isdir(bdir):
 
                os.makedirs(bdir)
 
            with open(ext_file, 'wb') as f:
 
                f.write(tmpl)
 
                log.info('Written new extensions file to %s' % ext_file)
 

	
 
        here = config['here']
 
        tmpl = pkg_resources.resource_string(
 
            'rhodecode', jn('config', 'rcextensions', '__init__.py')
 
        )
 
        ext_file = jn(here, 'rcextensions', '__init__.py')
 
        if os.path.exists(ext_file):
 
            msg = ('Extension file already exists, do you want '
 
                   'to overwrite it ? [y/n]')
 
            if ask_ok(msg):
 
                _make_file(ext_file)
 
            else:
 
                log.info('nothing done...')
 
        else:
 
            _make_file(ext_file)
 

	
 
    def update_parser(self):
 
        pass
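Assuming this class is wired up as a paster entry point in setup.py (that registration is not part of this changeset, so the command name below is an assumption), the command would be run against an existing RhodeCode ini file roughly like this:

paster make-rcext production.ini

If rcextensions/__init__.py already exists next to the ini file, ask_ok() prompts before overwriting it; otherwise the template shipped in rhodecode/config/rcextensions is written out directly.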
 

	
 

	
rhodecode/controllers/admin/users_groups.py
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.controllers.admin.users_groups
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    Users Groups crud controller for pylons
 

	
 
    :created_on: Jan 25, 2011
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import logging
 
import traceback
 
import formencode
 

	
 
from formencode import htmlfill
 
from pylons import request, session, tmpl_context as c, url, config
 
from pylons.controllers.util import abort, redirect
 
from pylons.i18n.translation import _
 

	
 
from rhodecode.lib import helpers as h
 
from rhodecode.lib.exceptions import UsersGroupsAssignedException
 
 
from rhodecode.lib.utils2 import safe_unicode
 
from rhodecode.lib.auth import LoginRequired, HasPermissionAllDecorator
 
from rhodecode.lib.base import BaseController, render
 

	
 
from rhodecode.model.users_group import UsersGroupModel
 

	
 
from rhodecode.model.db import User, UsersGroup, Permission, UsersGroupToPerm
 
from rhodecode.model.forms import UsersGroupForm
 
from rhodecode.model.meta import Session
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class UsersGroupsController(BaseController):
 
    """REST Controller styled on the Atom Publishing Protocol"""
 
    # To properly map this controller, ensure your config/routing.py
 
    # file has a resource setup:
 
    #     map.resource('users_group', 'users_groups')
 

	
 
    @LoginRequired()
 
    @HasPermissionAllDecorator('hg.admin')
 
    def __before__(self):
 
        c.admin_user = session.get('admin_user')
 
        c.admin_username = session.get('admin_username')
 
        super(UsersGroupsController, self).__before__()
 
        c.available_permissions = config['available_permissions']
 

	
 
    def index(self, format='html'):
 
        """GET /users_groups: All items in the collection"""
 
        # url('users_groups')
 
        c.users_groups_list = self.sa.query(UsersGroup).all()
 
        return render('admin/users_groups/users_groups.html')
 

	
 
    def create(self):
 
        """POST /users_groups: Create a new item"""
 
        # url('users_groups')
 

	
 
        users_group_form = UsersGroupForm()()
 
        try:
 
            form_result = users_group_form.to_python(dict(request.POST))
 
            UsersGroupModel().create(name=form_result['users_group_name'],
 
                                     active=form_result['users_group_active'])
 
            h.flash(_('created users group %s') \
 
                    % form_result['users_group_name'], category='success')
 
            #action_logger(self.rhodecode_user, 'new_user', '', '', self.sa)
 
            Session.commit()
 
        except formencode.Invalid, errors:
 
            return htmlfill.render(
 
                render('admin/users_groups/users_group_add.html'),
 
                defaults=errors.value,
 
                errors=errors.error_dict or {},
 
                prefix_error=False,
 
                encoding="UTF-8")
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('error occurred during creation of users group %s') \
 
                    % request.POST.get('users_group_name'), category='error')
 

	
 
        return redirect(url('users_groups'))
 

	
 
    def new(self, format='html'):
 
        """GET /users_groups/new: Form to create a new item"""
 
        # url('new_users_group')
 
        return render('admin/users_groups/users_group_add.html')
 

	
 
    def update(self, id):
 
        """PUT /users_groups/id: Update an existing item"""
 
        # Forms posted to this method should contain a hidden field:
 
        #    <input type="hidden" name="_method" value="PUT" />
 
        # Or using helpers:
 
        #    h.form(url('users_group', id=ID),
 
        #           method='put')
 
        # url('users_group', id=ID)
 

	
 
        c.users_group = UsersGroup.get(id)
 
        c.group_members_obj = [x.user for x in c.users_group.members]
 
        c.group_members = [(x.user_id, x.username) for x in
 
                           c.group_members_obj]
 

	
 
        c.available_members = [(x.user_id, x.username) for x in
 
                               self.sa.query(User).all()]
 

	
 
        available_members = [safe_unicode(x[0]) for x in c.available_members]
 

	
 
        users_group_form = UsersGroupForm(edit=True,
 
                                          old_data=c.users_group.get_dict(),
 
                                          available_members=available_members)()
 

	
 
        try:
 
            form_result = users_group_form.to_python(request.POST)
 
            UsersGroupModel().update(c.users_group, form_result)
 
            h.flash(_('updated users group %s') \
 
                        % form_result['users_group_name'],
 
                    category='success')
 
            #action_logger(self.rhodecode_user, 'new_user', '', '', self.sa)
 
            Session.commit()
 
        except formencode.Invalid, errors:
 
            e = errors.error_dict or {}
 

	
 
            perm = Permission.get_by_key('hg.create.repository')
 
            e.update({'create_repo_perm':
 
                         UsersGroupModel().has_perm(id, perm)})
 

	
 
            return htmlfill.render(
 
                render('admin/users_groups/users_group_edit.html'),
 
                defaults=errors.value,
 
                errors=e,
 
                prefix_error=False,
 
                encoding="UTF-8")
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('error occurred during update of users group %s') \
 
                    % request.POST.get('users_group_name'), category='error')
 

	
 
        return redirect(url('users_groups'))
 

	
 
    def delete(self, id):
 
        """DELETE /users_groups/id: Delete an existing item"""
 
        # Forms posted to this method should contain a hidden field:
 
        #    <input type="hidden" name="_method" value="DELETE" />
 
        # Or using helpers:
 
        #    h.form(url('users_group', id=ID),
 
        #           method='delete')
 
        # url('users_group', id=ID)
 

	
 
        try:
 
            UsersGroupModel().delete(id)
 
            Session.commit()
 
            h.flash(_('successfully deleted users group'), category='success')
 
        except UsersGroupsAssignedException, e:
 
            h.flash(e, category='error')
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('An error occurred during deletion of users group'),
 
                    category='error')
 
        return redirect(url('users_groups'))
 

	
 
    def show(self, id, format='html'):
 
        """GET /users_groups/id: Show a specific item"""
 
        # url('users_group', id=ID)
 

	
 
    def edit(self, id, format='html'):
 
        """GET /users_groups/id/edit: Form to edit an existing item"""
 
        # url('edit_users_group', id=ID)
 

	
 
        c.users_group = self.sa.query(UsersGroup).get(id)
 
        if not c.users_group:
 
            return redirect(url('users_groups'))
 

	
 
        c.users_group.permissions = {}
 
        c.group_members_obj = [x.user for x in c.users_group.members]
 
        c.group_members = [(x.user_id, x.username) for x in
 
                           c.group_members_obj]
 
        c.available_members = [(x.user_id, x.username) for x in
 
                               self.sa.query(User).all()]
 
        defaults = c.users_group.get_dict()
 
        perm = Permission.get_by_key('hg.create.repository')
 
        defaults.update({'create_repo_perm':
 
                         UsersGroupModel().has_perm(c.users_group, perm)})
 
        return htmlfill.render(
 
            render('admin/users_groups/users_group_edit.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False
 
        )
 

	
 
    def update_perm(self, id):
 
        """PUT /users_perm/id: Update an existing item"""
 
        # url('users_group_perm', id=ID, method='put')
 

	
 
        grant_perm = request.POST.get('create_repo_perm', False)
 

	
 
        if grant_perm:
 
            perm = Permission.get_by_key('hg.create.none')
 
            UsersGroupModel().revoke_perm(id, perm)
 

	
 
            perm = Permission.get_by_key('hg.create.repository')
 
            UsersGroupModel().grant_perm(id, perm)
 
            h.flash(_("Granted 'repository create' permission to user"),
 
                    category='success')
 

	
 
            Session.commit()
 
        else:
 
            perm = Permission.get_by_key('hg.create.repository')
 
            UsersGroupModel().revoke_perm(id, perm)
 

	
 
            perm = Permission.get_by_key('hg.create.none')
 
            UsersGroupModel().grant_perm(id, perm)
 
            h.flash(_("Revoked 'repository create' permission to user"),
 
                    category='success')
 
            Session.commit()
 
        return redirect(url('edit_users_group', id=id))
rhodecode/controllers/branches.py
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.controllers.branches
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    branches controller for rhodecode
 

	
 
    :created_on: Apr 21, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import logging
 

	
 
from pylons import tmpl_context as c
 
import binascii
 

	
 
from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
 
from rhodecode.lib.base import BaseRepoController, render
 
from rhodecode.lib.compat import OrderedDict
 
from rhodecode.lib.utils2 import safe_unicode
 
log = logging.getLogger(__name__)
 

	
 

	
 
class BranchesController(BaseRepoController):
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def __before__(self):
 
        super(BranchesController, self).__before__()
 

	
 
    def index(self):
 

	
 
        def _branchtags(localrepo):
 
            bt_closed = {}
 
            for bn, heads in localrepo.branchmap().iteritems():
 
                tip = heads[-1]
 
                if 'close' in localrepo.changelog.read(tip)[5]:
 
                    bt_closed[bn] = tip
 
            return bt_closed
 

	
 
        cs_g = c.rhodecode_repo.get_changeset
 

	
 
        c.repo_closed_branches = {}
 
        if c.rhodecode_db_repo.repo_type == 'hg':
 
            bt_closed = _branchtags(c.rhodecode_repo._repo)
 
            _closed_branches = [(safe_unicode(n), cs_g(binascii.hexlify(h)),)
 
                                for n, h in bt_closed.items()]
 

	
 
            c.repo_closed_branches = OrderedDict(sorted(_closed_branches,
 
                                                    key=lambda ctx: ctx[0],
 
                                                    reverse=False))
 

	
 
        _branches = [(safe_unicode(n), cs_g(h))
 
                     for n, h in c.rhodecode_repo.branches.items()]
 
        c.repo_branches = OrderedDict(sorted(_branches,
 
                                             key=lambda ctx: ctx[0],
 
                                             reverse=False))
 

	
 

	
 
        return render('branches/branches.html')
rhodecode/controllers/feed.py
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.controllers.feed
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    Feed controller for rhodecode
 

	
 
    :created_on: Apr 23, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import logging
 

	
 
from pylons import url, response, tmpl_context as c
 
from pylons.i18n.translation import _
 

	
 
from rhodecode.lib.utils2 import safe_unicode
 
from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
 
from rhodecode.lib.base import BaseRepoController
 

	
 
from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class FeedController(BaseRepoController):
 

	
 
    @LoginRequired(api_access=True)
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def __before__(self):
 
        super(FeedController, self).__before__()
 
        #common values for feeds
 
        self.description = _('Changes on %s repository')
 
        self.title = _('%s %s feed') % (c.rhodecode_name, '%s')
 
        self.language = 'en-us'
 
        self.ttl = "5"
 
        self.feed_nr = 10
 

	
 
    def _get_title(self, cs):
 
        return "R%s:%s - %s" % (
 
            cs.revision, cs.short_id, cs.message
 
        )
 

	
 
    def __changes(self, cs):
 
        changes = []
 

	
 
        a = [safe_unicode(n.path) for n in cs.added]
 
        if a:
 
            changes.append('\nA ' + '\nA '.join(a))
 

	
 
        m = [safe_unicode(n.path) for n in cs.changed]
 
        if m:
 
            changes.append('\nM ' + '\nM '.join(m))
 

	
 
        d = [safe_unicode(n.path) for n in cs.removed]
 
        if d:
 
            changes.append('\nD ' + '\nD '.join(d))
 

	
 
        changes.append('</pre>')
 

	
 
        return ''.join(changes)
 

	
 
    def atom(self, repo_name):
 
        """Produce an atom-1.0 feed via feedgenerator module"""
 
        feed = Atom1Feed(
 
             title=self.title % repo_name,
 
             link=url('summary_home', repo_name=repo_name,
 
                      qualified=True),
 
             description=self.description % repo_name,
 
             language=self.language,
 
             ttl=self.ttl
 
        )
 

	
 
        for cs in reversed(list(c.rhodecode_repo[-self.feed_nr:])):
 
            desc_msg = []
 
            desc_msg.append('%s - %s<br/><pre>' % (cs.author, cs.date))
 
            desc_msg.append(self.__changes(cs))
 

	
 
            feed.add_item(title=self._get_title(cs),
 
                          link=url('changeset_home', repo_name=repo_name,
 
                                   revision=cs.raw_id, qualified=True),
 
                          author_name=cs.author,
 
                          description=''.join(desc_msg))
 

	
 
        response.content_type = feed.mime_type
 
        return feed.writeString('utf-8')
 

	
 
    def rss(self, repo_name):
 
        """Produce an rss2 feed via feedgenerator module"""
 
        feed = Rss201rev2Feed(
 
            title=self.title % repo_name,
 
            link=url('summary_home', repo_name=repo_name,
 
                     qualified=True),
 
            description=self.description % repo_name,
 
            language=self.language,
 
            ttl=self.ttl
 
        )
 

	
 
        for cs in reversed(list(c.rhodecode_repo[-self.feed_nr:])):
 
            desc_msg = []
 
            desc_msg.append('%s - %s<br/><pre>' % (cs.author, cs.date))
 
            desc_msg.append(self.__changes(cs))
 

	
 
            feed.add_item(title=self._get_title(cs),
 
                          link=url('changeset_home', repo_name=repo_name,
 
                                   revision=cs.raw_id, qualified=True),
 
                          author_name=cs.author,
 
                          description=''.join(desc_msg),
 
                         )
 

	
 
        response.content_type = feed.mime_type
 
        return feed.writeString('utf-8')
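For a changeset that adds, modifies and removes one file each, __changes() together with the desc_msg prefix built in atom()/rss() yields an HTML fragment along these lines (author, date and file names are illustrative):

marcink - 2012-03-07 12:00:00<br/><pre>
A docs/index.rst
M setup.py
D old_module.py</pre>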
rhodecode/controllers/files.py
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.controllers.files
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    Files controller for RhodeCode
 

	
 
    :created_on: Apr 21, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import os
 
import logging
 
import traceback
 

	
 
from pylons import request, response, tmpl_context as c, url
 
from pylons.i18n.translation import _
 
from pylons.controllers.util import redirect
 
from pylons.decorators import jsonify
 

	
 
from rhodecode.lib import diffs
from rhodecode.lib import helpers as h

from rhodecode.lib.compat import OrderedDict
from rhodecode.lib.utils2 import convert_line_endings, detect_mode, safe_str
from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
from rhodecode.lib.base import BaseRepoController, render
from rhodecode.lib.utils import EmptyChangeset
from rhodecode.lib.vcs.conf import settings
from rhodecode.lib.vcs.exceptions import RepositoryError, \
    ChangesetDoesNotExistError, EmptyRepositoryError, \
    ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError
from rhodecode.lib.vcs.nodes import FileNode

from rhodecode.model.repo import RepoModel
from rhodecode.model.scm import ScmModel

from rhodecode.controllers.changeset import anchor_url, _ignorews_url,\
    _context_url, get_line_ctx, get_ignore_ws
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class FilesController(BaseRepoController):
 

	
 
    @LoginRequired()
 
    def __before__(self):
 
        super(FilesController, self).__before__()
 
        c.cut_off_limit = self.cut_off_limit
 

	
 
    def __get_cs_or_redirect(self, rev, repo_name, redirect_after=True):
 
        """
 
        Safe way to get changeset if error occur it redirects to tip with
 
        proper message
 

	
 
        :param rev: revision to fetch
 
        :param repo_name: repo name to redirect after
 
        """
 

	
 
        try:
 
            return c.rhodecode_repo.get_changeset(rev)
 
        except EmptyRepositoryError, e:
 
            if not redirect_after:
 
                return None
 
            url_ = url('files_add_home',
 
                       repo_name=c.repo_name,
 
                       revision=0, f_path='')
 
            add_new = '<a href="%s">[%s]</a>' % (url_, _('add new'))
 
            h.flash(h.literal(_('There are no files yet %s' % add_new)),
 
                    category='warning')
 
            redirect(h.url('summary_home', repo_name=repo_name))
 

	
 
        except RepositoryError, e:
 
            h.flash(str(e), category='warning')
 
            redirect(h.url('files_home', repo_name=repo_name, revision='tip'))
 

	
 
    def __get_filenode_or_redirect(self, repo_name, cs, path):
 
        """
 
        Returns file_node, if error occurs or given path is directory,
 
        it'll redirect to top level path
 

	
 
        :param repo_name: repo_name
 
        :param cs: given changeset
 
        :param path: path to lookup
 
        """
 

	
 
        try:
 
            file_node = cs.get_node(path)
 
            if file_node.is_dir():
 
                raise RepositoryError('given path is a directory')
 
        except RepositoryError, e:
 
            h.flash(str(e), category='warning')
 
            redirect(h.url('files_home', repo_name=repo_name,
 
                           revision=cs.raw_id))
 

	
 
        return file_node
 

	
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def index(self, repo_name, revision, f_path):
 
        # redirect to given revision from form if given
 
        post_revision = request.POST.get('at_rev', None)
 
        if post_revision:
 
            cs = self.__get_cs_or_redirect(post_revision, repo_name)
 
            redirect(url('files_home', repo_name=c.repo_name,
 
                         revision=cs.raw_id, f_path=f_path))
 

	
 
        c.changeset = self.__get_cs_or_redirect(revision, repo_name)
 
        c.branch = request.GET.get('branch', None)
 
        c.f_path = f_path
 

	
 
        cur_rev = c.changeset.revision
 

	
 
        # prev link
 
        try:
 
            prev_rev = c.rhodecode_repo.get_changeset(cur_rev).prev(c.branch)
 
            c.url_prev = url('files_home', repo_name=c.repo_name,
 
                         revision=prev_rev.raw_id, f_path=f_path)
 
            if c.branch:
 
                c.url_prev += '?branch=%s' % c.branch
 
        except (ChangesetDoesNotExistError, VCSError):
 
            c.url_prev = '#'
 

	
 
        # next link
 
        try:
 
            next_rev = c.rhodecode_repo.get_changeset(cur_rev).next(c.branch)
 
            c.url_next = url('files_home', repo_name=c.repo_name,
 
                     revision=next_rev.raw_id, f_path=f_path)
 
            if c.branch:
 
                c.url_next += '?branch=%s' % c.branch
 
        except (ChangesetDoesNotExistError, VCSError):
 
            c.url_next = '#'
 

	
 
        # files or dirs
 
        try:
 
            c.file = c.changeset.get_node(f_path)
 

	
 
            if c.file.is_file():
 
                c.file_history = self._get_node_history(c.changeset, f_path)
 
            else:
 
                c.file_history = []
 
        except RepositoryError, e:
 
            h.flash(str(e), category='warning')
 
            redirect(h.url('files_home', repo_name=repo_name,
 
                           revision=revision))
 

	
 
        return render('files/files.html')
 

	
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def rawfile(self, repo_name, revision, f_path):
 
        cs = self.__get_cs_or_redirect(revision, repo_name)
 
        file_node = self.__get_filenode_or_redirect(repo_name, cs, f_path)
 

	
 
        response.content_disposition = 'attachment; filename=%s' % \
 
            safe_str(f_path.split(os.sep)[-1])
 

	
 
        response.content_type = file_node.mimetype
 
        return file_node.content
 

	
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def raw(self, repo_name, revision, f_path):
 
        cs = self.__get_cs_or_redirect(revision, repo_name)
 
        file_node = self.__get_filenode_or_redirect(repo_name, cs, f_path)
 

	
 
        raw_mimetype_mapping = {
 
            # map original mimetype to a mimetype used for "show as raw"
 
            # you can also provide a content-disposition to override the
 
            # default "attachment" disposition.
 
            # orig_type: (new_type, new_dispo)
 

	
 
            # show images inline:
 
            'image/x-icon': ('image/x-icon', 'inline'),
 
            'image/png': ('image/png', 'inline'),
 
            'image/gif': ('image/gif', 'inline'),
 
            'image/jpeg': ('image/jpeg', 'inline'),
 
            'image/svg+xml': ('image/svg+xml', 'inline'),
 
        }
 

	
 
        mimetype = file_node.mimetype
 
        try:
 
            mimetype, dispo = raw_mimetype_mapping[mimetype]
 
        except KeyError:
 
            # we don't know anything special about this, handle it safely
 
            if file_node.is_binary:
 
                # do same as download raw for binary files
 
                mimetype, dispo = 'application/octet-stream', 'attachment'
 
            else:
 
                # do not just use the original mimetype, but force text/plain,
 
                # otherwise it would serve text/html and that might be unsafe.
 
                # Note: underlying vcs library fakes text/plain mimetype if the
 
                # mimetype can not be determined and it thinks it is not
 
                # binary. This might lead to erroneous text display in some
 
                # cases, but helps in other cases, like with text files
 
                # without extension.
 
                mimetype, dispo = 'text/plain', 'inline'
 

	
 
        if dispo == 'attachment':
 
            dispo = 'attachment; filename=%s' % \
 
                        safe_str(f_path.split(os.sep)[-1])
 

	
 
        response.content_disposition = dispo
 
        response.content_type = mimetype
 
        return file_node.content
 

	
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def annotate(self, repo_name, revision, f_path):
 
        c.cs = self.__get_cs_or_redirect(revision, repo_name)
 
        c.file = self.__get_filenode_or_redirect(repo_name, c.cs, f_path)
 

	
 
        c.file_history = self._get_node_history(c.cs, f_path)
 
        c.f_path = f_path
 
        return render('files/files_annotate.html')
 

	
 
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
 
    def edit(self, repo_name, revision, f_path):
 
        r_post = request.POST
 

	
 
        c.cs = self.__get_cs_or_redirect(revision, repo_name)
 
        c.file = self.__get_filenode_or_redirect(repo_name, c.cs, f_path)
 

	
 
        if c.file.is_binary:
 
            return redirect(url('files_home', repo_name=c.repo_name,
 
                         revision=c.cs.raw_id, f_path=f_path))
 

	
 
        c.f_path = f_path
 

	
 
        if r_post:
 

	
 
            old_content = c.file.content
 
            sl = old_content.splitlines(1)
 
            first_line = sl[0] if sl else ''
 
            # modes:  0 - Unix, 1 - Mac, 2 - DOS
 
            mode = detect_mode(first_line, 0)
 
            content = convert_line_endings(r_post.get('content'), mode)
 

	
 
            message = r_post.get('message') or (_('Edited %s via RhodeCode')
 
                                                % (f_path))
 
            author = self.rhodecode_user.full_contact
 

	
 
            if content == old_content:
 
                h.flash(_('No changes'),
 
                    category='warning')
 
                return redirect(url('changeset_home', repo_name=c.repo_name,
 
                                    revision='tip'))
 

	
 
            try:
 
                self.scm_model.commit_change(repo=c.rhodecode_repo,
 
                                             repo_name=repo_name, cs=c.cs,
 
                                             user=self.rhodecode_user,
 
                                             author=author, message=message,
 
                                             content=content, f_path=f_path)
 
                h.flash(_('Successfully committed to %s' % f_path),
 
                        category='success')
 

	
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                h.flash(_('Error occurred during commit'), category='error')
 
            return redirect(url('changeset_home',
 
                                repo_name=c.repo_name, revision='tip'))
 

	
 
        return render('files/files_edit.html')
 

	
 
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
 
    def add(self, repo_name, revision, f_path):
 
        r_post = request.POST
 
        c.cs = self.__get_cs_or_redirect(revision, repo_name,
 
                                         redirect_after=False)
 
        if c.cs is None:
 
            c.cs = EmptyChangeset(alias=c.rhodecode_repo.alias)
 

	
 
        c.f_path = f_path
 

	
 
        if r_post:
 
            unix_mode = 0
 
            content = convert_line_endings(r_post.get('content'), unix_mode)
 

	
 
            message = r_post.get('message') or (_('Added %s via RhodeCode')
 
                                                % (f_path))
 
            location = r_post.get('location')
 
            filename = r_post.get('filename')
 
            file_obj = r_post.get('upload_file', None)
 

	
 
            if file_obj is not None and hasattr(file_obj, 'filename'):
 
                filename = file_obj.filename
 
                content = file_obj.file
 

	
 
            node_path = os.path.join(location, filename)
 
            author = self.rhodecode_user.full_contact
 

	
 
            if not content:
 
                h.flash(_('No content'), category='warning')
 
                return redirect(url('changeset_home', repo_name=c.repo_name,
 
                                    revision='tip'))
 
            if not filename:
 
                h.flash(_('No filename'), category='warning')
 
                return redirect(url('changeset_home', repo_name=c.repo_name,
 
                                    revision='tip'))
 

	
 
            try:
 
                self.scm_model.create_node(repo=c.rhodecode_repo,
 
                                             repo_name=repo_name, cs=c.cs,
 
                                             user=self.rhodecode_user,
 
                                             author=author, message=message,
 
                                             content=content, f_path=node_path)
 
                h.flash(_('Successfully committed to %s' % node_path),
 
                        category='success')
 
            except NodeAlreadyExistsError, e:
 
                h.flash(_(e), category='error')
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                h.flash(_('Error occurred during commit'), category='error')
 
            return redirect(url('changeset_home',
 
                                repo_name=c.repo_name, revision='tip'))
 

	
 
        return render('files/files_add.html')
 

	
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def archivefile(self, repo_name, fname):
 

	
 
        fileformat = None
 
        revision = None
 
        ext = None
 
        subrepos = request.GET.get('subrepos') == 'true'
 

	
 
        for a_type, ext_data in settings.ARCHIVE_SPECS.items():
 
            archive_spec = fname.split(ext_data[1])
 
            if len(archive_spec) == 2 and archive_spec[1] == '':
 
                fileformat = a_type or ext_data[1]
 
                revision = archive_spec[0]
 
                ext = ext_data[1]
 

	
 
        try:
 
            dbrepo = RepoModel().get_by_repo_name(repo_name)
 
            if dbrepo.enable_downloads is False:
 
                return _('downloads disabled')
 

	
 
            if c.rhodecode_repo.alias == 'hg':
 
                # patch and reset hooks section of UI config to not run any
 
                # hooks on fetching archives with subrepos
 
                for k, v in c.rhodecode_repo._repo.ui.configitems('hooks'):
 
                    c.rhodecode_repo._repo.ui.setconfig('hooks', k, None)
 

	
 
            cs = c.rhodecode_repo.get_changeset(revision)
 
            content_type = settings.ARCHIVE_SPECS[fileformat][0]
 
        except ChangesetDoesNotExistError:
 
            return _('Unknown revision %s') % revision
 
        except EmptyRepositoryError:
 
            return _('Empty repository')
 
        except (ImproperArchiveTypeError, KeyError):
 
            return _('Unknown archive type')
 

	
 
        response.content_type = content_type
 
        response.content_disposition = 'attachment; filename=%s-%s%s' \
 
            % (repo_name, revision, ext)
 

	
 
        import tempfile
 
        archive = tempfile.mkstemp()[1]
 
        t = open(archive, 'wb')
 
        cs.fill_archive(stream=t, kind=fileformat, subrepos=subrepos)
 

	
 
        def get_chunked_archive(archive):
 
            stream = open(archive, 'rb')
 
            while True:
 
                data = stream.read(4096)
 
                if not data:
 
                    os.remove(archive)
 
                    break
 
                yield data
 

	
 
        return get_chunked_archive(archive)
 

	
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def diff(self, repo_name, f_path):
 
        ignore_whitespace = request.GET.get('ignorews') == '1'
 
        line_context = request.GET.get('context', 3)
 
        diff1 = request.GET.get('diff1', '')
 
        diff2 = request.GET.get('diff2', '')
 
        c.action = request.GET.get('diff')
 
        c.no_changes = diff1 == diff2
 
        c.f_path = f_path
 
        c.big_diff = False
 
        c.anchor_url = anchor_url
 
        c.ignorews_url = _ignorews_url
 
        c.context_url = _context_url
 
        c.changes = OrderedDict()
 
        c.changes[diff2] = []
 
        try:
 
            if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]:
 
                c.changeset_1 = c.rhodecode_repo.get_changeset(diff1)
 
                node1 = c.changeset_1.get_node(f_path)
 
            else:
 
                c.changeset_1 = EmptyChangeset(repo=c.rhodecode_repo)
 
                node1 = FileNode('.', '', changeset=c.changeset_1)
 

	
 
            if diff2 not in ['', None, 'None', '0' * 12, '0' * 40]:
 
                c.changeset_2 = c.rhodecode_repo.get_changeset(diff2)
 
                node2 = c.changeset_2.get_node(f_path)
 
            else:
 
                c.changeset_2 = EmptyChangeset(repo=c.rhodecode_repo)
 
                node2 = FileNode('.', '', changeset=c.changeset_2)
 
        except RepositoryError:
 
            return redirect(url('files_home', repo_name=c.repo_name,
 
                                f_path=f_path))
 

	
 
        if c.action == 'download':
 
            _diff = diffs.get_gitdiff(node1, node2,
 
                                      ignore_whitespace=ignore_whitespace,
 
                                      context=line_context)
 
            diff = diffs.DiffProcessor(_diff, format='gitdiff')
 

	
 
            diff_name = '%s_vs_%s.diff' % (diff1, diff2)
 
            response.content_type = 'text/plain'
 
            response.content_disposition = (
 
                'attachment; filename=%s' % diff_name
 
            )
 
            return diff.raw_diff()
 

	
 
        elif c.action == 'raw':
 
            _diff = diffs.get_gitdiff(node1, node2,
 
                                      ignore_whitespace=ignore_whitespace,
 
                                      context=line_context)
 
            diff = diffs.DiffProcessor(_diff, format='gitdiff')
 
            response.content_type = 'text/plain'
 
            return diff.raw_diff()
 

	
 
        else:
 
            fid = h.FID(diff2, node2.path)
 
            line_context_lcl = get_line_ctx(fid, request.GET)
 
            ign_whitespace_lcl = get_ignore_ws(fid, request.GET)
 

	
 
            lim = request.GET.get('fulldiff') or self.cut_off_limit
 
            _, cs1, cs2, diff, st = diffs.wrapped_diff(filenode_old=node1,
 
                                         filenode_new=node2,
 
                                         cut_off_limit=lim,
 
                                         ignore_whitespace=ign_whitespace_lcl,
 
                                         line_context=line_context_lcl,
 
                                         enable_comments=False)
 

	
 
            c.changes = [('', node2, diff, cs1, cs2, st,)]
 

	
 
        return render('files/file_diff.html')
 

	
 
    def _get_node_history(self, cs, f_path):
 
        changesets = cs.get_file_history(f_path)
 
        hist_l = []
 

	
 
        changesets_group = ([], _("Changesets"))
 
        branches_group = ([], _("Branches"))
 
        tags_group = ([], _("Tags"))
 
        _hg = cs.repository.alias == 'hg'
 
        for chs in changesets:
 
            _branch = '(%s)' % chs.branch if _hg else ''
 
            n_desc = 'r%s:%s %s' % (chs.revision, chs.short_id, _branch)
 
            changesets_group[0].append((chs.raw_id, n_desc,))
 

	
 
        hist_l.append(changesets_group)
 

	
 
        for name, chs in c.rhodecode_repo.branches.items():
 
            branches_group[0].append((chs, name),)
 
        hist_l.append(branches_group)
 

	
 
        for name, chs in c.rhodecode_repo.tags.items():
 
            tags_group[0].append((chs, name),)
 
        hist_l.append(tags_group)
 

	
 
        return hist_l
 

	
 
    @jsonify
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def nodelist(self, repo_name, revision, f_path):
 
        if request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            cs = self.__get_cs_or_redirect(revision, repo_name)
 
            _d, _f = ScmModel().get_nodes(repo_name, cs.raw_id, f_path,
 
                                          flat=False)
 
            return _d + _f
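The diff() action above is driven entirely by GET parameters. A hedged sketch of building the query a client could send to fetch a raw, whitespace-insensitive diff of a single file (the revision values are hypothetical):

import urllib

params = {
    'diff1': 'rev_a',   # first revision to compare (hypothetical)
    'diff2': 'rev_b',   # second revision to compare (hypothetical)
    'diff': 'raw',      # or 'download'; anything else renders the HTML diff page
    'ignorews': '1',    # ignore whitespace changes
    'context': 5,       # number of context lines around each hunk
}
query = urllib.urlencode(params)  # e.g. diff1=rev_a&diff2=rev_b&diff=raw&...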
rhodecode/controllers/summary.py
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.controllers.summary
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    Summary controller for Rhodecode
 

	
 
    :created_on: Apr 18, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import traceback
 
import calendar
 
import logging
 
import urllib
 
from time import mktime
 
from datetime import timedelta, date
 
from urlparse import urlparse
 
from rhodecode.lib.compat import product
 

	
 
from rhodecode.lib.vcs.exceptions import ChangesetError, EmptyRepositoryError, \
 
    NodeDoesNotExistError
 

	
 
from pylons import tmpl_context as c, request, url, config
 
from pylons.i18n.translation import _
 

	
 
from beaker.cache import cache_region, region_invalidate
 

	
 
from rhodecode.config.conf import ALL_READMES, ALL_EXTS, LANGUAGES_EXTENSIONS_MAP
from rhodecode.model.db import Statistics, CacheInvalidation
from rhodecode.lib.utils2 import safe_unicode
 
from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
 
from rhodecode.lib.base import BaseRepoController, render
 
from rhodecode.lib.utils import EmptyChangeset
 
from rhodecode.lib.markup_renderer import MarkupRenderer
 
from rhodecode.lib.celerylib import run_task
 
from rhodecode.lib.celerylib.tasks import get_commits_stats
 
from rhodecode.lib.helpers import RepoPage
 
from rhodecode.lib.compat import json, OrderedDict
 

	
 
log = logging.getLogger(__name__)
 

	
 
README_FILES = [''.join([x[0][0], x[1][0]]) for x in
 
                    sorted(list(product(ALL_READMES, ALL_EXTS)),
 
                           key=lambda y:y[0][1] + y[1][1])]
 

	
 

	
 
class SummaryController(BaseRepoController):
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def __before__(self):
 
        super(SummaryController, self).__before__()
 

	
 
    def index(self, repo_name):
 
        c.dbrepo = dbrepo = c.rhodecode_db_repo
 
        c.following = self.scm_model.is_following_repo(repo_name,
 
                                                self.rhodecode_user.user_id)
 

	
 
        def url_generator(**kw):
 
            return url('shortlog_home', repo_name=repo_name, size=10, **kw)
 

	
 
        c.repo_changesets = RepoPage(c.rhodecode_repo, page=1,
 
                                     items_per_page=10, url=url_generator)
 

	
 
        if self.rhodecode_user.username == 'default':
 
            # for default(anonymous) user we don't need to pass credentials
 
            username = ''
 
            password = ''
 
        else:
 
            username = str(self.rhodecode_user.username)
 
            password = '@'
 

	
 
        parsed_url = urlparse(url.current(qualified=True))
 

	
 
        default_clone_uri = '{scheme}://{user}{pass}{netloc}{path}'
 

	
 
        uri_tmpl = config.get('clone_uri', default_clone_uri)
 
        uri_tmpl = uri_tmpl.replace('{', '%(').replace('}', ')s')
 
        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
 
        uri_dict = {
 
           'user': username,
 
           'pass': password,
 
           'scheme': parsed_url.scheme,
 
           'netloc': parsed_url.netloc,
 
           'path': decoded_path
 
        }
 

	
 
        uri = uri_tmpl % uri_dict
 
        # generate another clone url by id
 
        uri_dict.update(
 
         {'path': decoded_path.replace(repo_name, '_%s' % c.dbrepo.repo_id)}
 
        )
 
        uri_id = uri_tmpl % uri_dict
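        # with the default template '{scheme}://{user}{pass}{netloc}{path}' the
        # replace() calls above rewrite it to
        # '%(scheme)s://%(user)s%(pass)s%(netloc)s%(path)s'; for a hypothetical
        # logged-in user 'marcink' viewing http://example.com/myrepo (repo_id 5)
        # the two interpolations give:
        #   uri    == 'http://marcink@example.com/myrepo'
        #   uri_id == 'http://marcink@example.com/_5'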
 

	
 
        c.clone_repo_url = uri
 
        c.clone_repo_url_id = uri_id
 
        c.repo_tags = OrderedDict()
 
        for name, hash_ in c.rhodecode_repo.tags.items()[:10]:
 
            try:
 
                c.repo_tags[name] = c.rhodecode_repo.get_changeset(hash_)
 
            except ChangesetError:
 
                c.repo_tags[name] = EmptyChangeset(hash_)
 

	
 
        c.repo_branches = OrderedDict()
 
        for name, hash_ in c.rhodecode_repo.branches.items()[:10]:
 
            try:
 
                c.repo_branches[name] = c.rhodecode_repo.get_changeset(hash_)
 
            except ChangesetError:
 
                c.repo_branches[name] = EmptyChangeset(hash_)
 

	
 
        td = date.today() + timedelta(days=1)
 
        td_1m = td - timedelta(days=calendar.mdays[td.month])
 
        td_1y = td - timedelta(days=365)
 

	
 
        ts_min_m = mktime(td_1m.timetuple())
 
        ts_min_y = mktime(td_1y.timetuple())
 
        ts_max_y = mktime(td.timetuple())
 

	
 
        if dbrepo.enable_statistics:
 
            c.show_stats = True
 
            c.no_data_msg = _('No data loaded yet')
 
            run_task(get_commits_stats, c.dbrepo.repo_name, ts_min_y, ts_max_y)
 
        else:
 
            c.show_stats = False
 
            c.no_data_msg = _('Statistics are disabled for this repository')
 
        c.ts_min = ts_min_m
 
        c.ts_max = ts_max_y
 

	
 
        stats = self.sa.query(Statistics)\
 
            .filter(Statistics.repository == dbrepo)\
 
            .scalar()
 

	
 
        c.stats_percentage = 0
 

	
 
        if stats and stats.languages:
 
            c.no_data = False is dbrepo.enable_statistics
 
            lang_stats_d = json.loads(stats.languages)
 
            c.commit_data = stats.commit_activity
 
            c.overview_data = stats.commit_activity_combined
 

	
 
            lang_stats = ((x, {"count": y,
 
                               "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
 
                          for x, y in lang_stats_d.items())
 

	
 
            c.trending_languages = json.dumps(
 
                sorted(lang_stats, reverse=True, key=lambda k: k[1])[:10]
 
            )
 
            last_rev = stats.stat_on_revision + 1
 
            c.repo_last_rev = c.rhodecode_repo.count()\
 
                if c.rhodecode_repo.revisions else 0
 
            if last_rev == 0 or c.repo_last_rev == 0:
 
                pass
 
            else:
 
                c.stats_percentage = '%.2f' % ((float((last_rev)) /
 
                                                c.repo_last_rev) * 100)
 
        else:
 
            c.commit_data = json.dumps({})
 
            c.overview_data = json.dumps([[ts_min_y, 0], [ts_max_y, 10]])
 
            c.trending_languages = json.dumps({})
 
            c.no_data = True
 

	
 
        c.enable_downloads = dbrepo.enable_downloads
 
        if c.enable_downloads:
 
            c.download_options = self._get_download_links(c.rhodecode_repo)
 

	
 
        c.readme_data, c.readme_file = self.__get_readme_data(c.rhodecode_db_repo)
 
        return render('summary/summary.html')
 

	
 
    def __get_readme_data(self, repo):
 

	
 
        @cache_region('long_term')
 
        def _get_readme_from_cache(key):
 
            readme_data = None
 
            readme_file = None
 
            log.debug('Fetching readme file')
 
            try:
 
                cs = repo.get_changeset('tip')
 
                renderer = MarkupRenderer()
 
                for f in README_FILES:
 
                    try:
 
                        readme = cs.get_node(f)
 
                        readme_file = f
 
                        readme_data = renderer.render(readme.content, f)
 
                        log.debug('Found readme %s' % readme_file)
 
                        break
 
                    except NodeDoesNotExistError:
 
                        continue
 
            except ChangesetError:
 
                pass
 
            except EmptyRepositoryError:
 
                pass
 
            except Exception:
 
                log.error(traceback.format_exc())
 

	
 
            return readme_data, readme_file
 

	
 
        key = repo.repo_name + '_README'
 
        inv = CacheInvalidation.invalidate(key)
 
        if inv is not None:
 
            region_invalidate(_get_readme_from_cache, None, key)
 
            CacheInvalidation.set_valid(inv.cache_key)
 
        return _get_readme_from_cache(key)
 

	
 
    def _get_download_links(self, repo):
 

	
 
        download_l = []
 

	
 
        branches_group = ([], _("Branches"))
 
        tags_group = ([], _("Tags"))
 

	
 
        for name, chs in c.rhodecode_repo.branches.items():
 
            #chs = chs.split(':')[-1]
 
            branches_group[0].append((chs, name),)
 
        download_l.append(branches_group)
 

	
 
        for name, chs in c.rhodecode_repo.tags.items():
 
            #chs = chs.split(':')[-1]
 
            tags_group[0].append((chs, name),)
 
        download_l.append(tags_group)
 

	
 
        return download_l
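        # Shape of the value returned above (hashes/names are illustrative;
        # the group labels come from the translated _("Branches")/_("Tags")):
        #   [([('<sha>', 'default'), ...], u'Branches'),
        #    ([('<sha>', 'v1.0'), ...], u'Tags')]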
rhodecode/lib/__init__.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.__init__
 
    ~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    Some simple helper functions
 

	
 
    :created_on: Jan 5, 2011
 
    :author: marcink
 
    :copyright: (C) 2011-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import os
 
import re
 
from rhodecode import EXTENSIONS
 
from rhodecode.lib.vcs.utils.lazy import LazyProperty
 

	
 

	
 
def __get_lem():
 
    from pygments import lexers
 
    from string import lower
 
    from collections import defaultdict
 

	
 
    d = defaultdict(lambda: [])
 

	
 
    def __clean(s):
 
        s = s.lstrip('*')
 
        s = s.lstrip('.')
 

	
 
        if s.find('[') != -1:
 
            exts = []
 
            start, stop = s.find('['), s.find(']')
 

	
 
            for suffix in s[start + 1:stop]:
 
                exts.append(s[:s.find('[')] + suffix)
 
            return map(lower, exts)
 
        else:
 
            return map(lower, [s])
 

	
 
    for lx, t in sorted(lexers.LEXERS.items()):
 
        m = map(__clean, t[-2])
 
        if m:
 
            m = reduce(lambda x, y: x + y, m)
 
            for ext in m:
 
                desc = lx.replace('Lexer', '')
 
                d[ext].append(desc)
 

	
 
    return dict(d)
 

	
 
# language map is also used by the whoosh indexer, which will index
 
# the content of files with these extensions
 
LANGUAGES_EXTENSIONS_MAP = __get_lem()
 

	
 
# Additional mappings that are not present in the pygments lexers
 
LANGUAGES_EXTENSIONS_MAP.update(getattr(EXTENSIONS, 'EXTRA_MAPPINGS', {}))
 

	
 
#==============================================================================
 
# WHOOSH INDEX EXTENSIONS
 
#==============================================================================
 
# EXTENSIONS WE WANT TO INDEX CONTENT OF USING WHOOSH
 
INDEX_EXTENSIONS = LANGUAGES_EXTENSIONS_MAP.keys()
 

	
 
#OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)
 

	
 
if getattr(EXTENSIONS, 'INDEX_EXTENSIONS', []) != []:
 
    INDEX_EXTENSIONS = getattr(EXTENSIONS, 'INDEX_EXTENSIONS', [])
 

	
 
#ADDITIONAL MAPPINGS
 
INDEX_EXTENSIONS.extend(getattr(EXTENSIONS, 'EXTRA_INDEX_EXTENSIONS', []))
 

	
 
# list of readme files to search in file tree and display in summary
 
# attached weights define the search order, lower is first
 
ALL_READMES = [
 
    ('readme', 0), ('README', 0), ('Readme', 0),
 
    ('doc/readme', 1), ('doc/README', 1), ('doc/Readme', 1),
 
    ('Docs/readme', 2), ('Docs/README', 2), ('Docs/Readme', 2),
 
    ('DOCS/readme', 2), ('DOCS/README', 2), ('DOCS/Readme', 2),
 
    ('docs/readme', 2), ('docs/README', 2), ('docs/Readme', 2),
 
]
 

	
 
# extensions together with weights to search, lower is first
 
RST_EXTS = [
 
    ('', 0), ('.rst', 1), ('.rest', 1),
 
    ('.RST', 2), ('.REST', 2),
 
    ('.txt', 3), ('.TXT', 3)
 
]
 

	
 
MARKDOWN_EXTS = [
 
    ('.md', 1), ('.MD', 1),
 
    ('.mkdn', 2), ('.MKDN', 2),
 
    ('.mdown', 3), ('.MDOWN', 3),
 
    ('.markdown', 4), ('.MARKDOWN', 4)
 
]
 

	
 
PLAIN_EXTS = [('.text', 2), ('.TEXT', 2)]
 

	
 
ALL_EXTS = MARKDOWN_EXTS + RST_EXTS + PLAIN_EXTS
 

	
 

	
 
def str2bool(_str):
 
    """
 
    returns a True/False value from the given string; it tries to translate the
 
    string into a boolean
 

	
 
    :param _str: string value to translate into boolean
 
    :rtype: boolean
 
    :returns: boolean from given string
 
    """
 
    if _str is None:
 
        return False
 
    if _str in (True, False):
 
        return _str
 
    _str = str(_str).strip().lower()
 
    return _str in ('t', 'true', 'y', 'yes', 'on', '1')
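# Usage sketch (illustrative; results follow the truthy-string set above):
#   str2bool('yes'), str2bool('On'), str2bool('1')   # -> (True, True, True)
#   str2bool('no'),  str2bool(''),   str2bool(None)  # -> (False, False, False)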
 

	
 

	
 
def convert_line_endings(line, mode):
 
    """
 
    Converts the line endings of a given line according to the given mode
 

	
 
    Available modes are::
 
        0 - Unix
 
        1 - Mac
 
        2 - DOS
 

	
 
    :param line: given line to convert
 
    :param mode: mode to convert to
 
    :rtype: str
 
    :return: converted line according to mode
 
    """
 
    from string import replace
 

	
 
    if mode == 0:
 
            line = replace(line, '\r\n', '\n')
 
            line = replace(line, '\r', '\n')
 
    elif mode == 1:
 
            line = replace(line, '\r\n', '\r')
 
            line = replace(line, '\n', '\r')
 
    elif mode == 2:
 
            line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line)
 
    return line
 

	
 

	
 
def detect_mode(line, default):
 
    """
 
    Detects the line break style of the given line; if the line break couldn't
 
    be found, the given default value is returned
 

	
 
    :param line: str line
 
    :param default: default
 
    :rtype: int
 
    :return: line end value, one of 0 - Unix, 1 - Mac, 2 - DOS
 
    """
 
    if line.endswith('\r\n'):
 
        return 2
 
    elif line.endswith('\n'):
 
        return 0
 
    elif line.endswith('\r'):
 
        return 1
 
    else:
 
        return default
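# Hedged example of the two helpers above working together:
#   detect_mode('foo\r\n', 0)           # -> 2 (DOS)
#   convert_line_endings('foo\r\n', 0)  # -> 'foo\n' (forced to Unix)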
 

	
 

	
 
def generate_api_key(username, salt=None):
 
    """
 
    Generates a unique API key for the given username; if salt is not given,
 
    it'll be generated from a random string
 

	
 
    :param username: username as string
 
    :param salt: salt to hash generate KEY
 
    :rtype: str
 
    :returns: sha1 hash from username+salt
 
    """
 
    from tempfile import _RandomNameSequence
 
    import hashlib
 

	
 
    if salt is None:
 
        salt = _RandomNameSequence().next()
 

	
 
    return hashlib.sha1(username + salt).hexdigest()
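# Illustrative call (the salt here is an assumed literal):
#   key = generate_api_key('marcin', salt='abc')
#   len(key)   # -> 40, a sha1 hexdigest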
 

	
 

	
 
def safe_unicode(str_, from_encoding=None):
 
    """
 
    safe unicode function. Does a few tricks to turn str_ into unicode
 

	
 
    In case of a UnicodeDecodeError we try to decode it with the encoding
 
    detected by the chardet library; if that fails we fall back to unicode
 
    with errors replaced
 

	
 
    :param str_: string to decode
 
    :rtype: unicode
 
    :returns: unicode object
 
    """
 
    if isinstance(str_, unicode):
 
        return str_
 

	
 
    if not from_encoding:
 
        import rhodecode
 
        DEFAULT_ENCODING = rhodecode.CONFIG.get('default_encoding','utf8')
 
        from_encoding = DEFAULT_ENCODING
 

	
 
    try:
 
        return unicode(str_)
 
    except UnicodeDecodeError:
 
        pass
 

	
 
    try:
 
        return unicode(str_, from_encoding)
 
    except UnicodeDecodeError:
 
        pass
 

	
 
    try:
 
        import chardet
 
        encoding = chardet.detect(str_)['encoding']
 
        if encoding is None:
 
            raise Exception()
 
        return str_.decode(encoding)
 
    except (ImportError, UnicodeDecodeError, Exception):
 
        return unicode(str_, from_encoding, 'replace')
 

	
 

	
 
def safe_str(unicode_, to_encoding=None):
 
    """
 
    safe str function. Does a few tricks to turn unicode_ into a string
 

	
 
    In case of a UnicodeEncodeError we try to encode it with the encoding
 
    detected by the chardet library; if that fails we fall back to a string
 
    with errors replaced
 

	
 
    :param unicode_: unicode to encode
 
    :rtype: str
 
    :returns: str object
 
    """
 

	
 
    # if it's not basestr cast to str
 
    if not isinstance(unicode_, basestring):
 
        return str(unicode_)
 

	
 
    if isinstance(unicode_, str):
 
        return unicode_
 

	
 
    if not to_encoding:
 
        import rhodecode
 
        DEFAULT_ENCODING = rhodecode.CONFIG.get('default_encoding','utf8')
 
        to_encoding = DEFAULT_ENCODING
 

	
 
    try:
 
        return unicode_.encode(to_encoding)
 
    except UnicodeEncodeError:
 
        pass
 

	
 
    try:
 
        import chardet
 
        encoding = chardet.detect(unicode_)['encoding']
 
        if encoding is None:
 
            raise Exception()
 
        return unicode_.encode(encoding)
 
    except (ImportError, UnicodeEncodeError, Exception):
 
        return unicode_.encode(to_encoding, 'replace')
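# Hedged round-trip example of the two helpers (assumes the configured
# default_encoding is utf8):
#   safe_unicode('grzeg\xc3\xb3rz')  ->  u'grzeg\xf3rz'
#   safe_str(u'grzeg\xf3rz')         ->  'grzeg\xc3\xb3rz'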
 

	
 
 

	
 

	
 
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
 
    """
 
    Custom engine_from_config function that makes sure we use NullPool for
 
    file based sqlite databases. This prevents errors on sqlite. This only
 
    applies to sqlalchemy versions < 0.7.0
 

	
 
    """
 
    import sqlalchemy
 
    from sqlalchemy import engine_from_config as efc
 
    import logging
 

	
 
    if int(sqlalchemy.__version__.split('.')[1]) < 7:
 

	
 
        # This solution should work for sqlalchemy < 0.7.0, and should use
 
        # proxy=TimerProxy() for execution time profiling
 

	
 
        from sqlalchemy.pool import NullPool
 
        url = configuration[prefix + 'url']
 

	
 
        if url.startswith('sqlite'):
 
            kwargs.update({'poolclass': NullPool})
 
        return efc(configuration, prefix, **kwargs)
 
    else:
 
        import time
 
        from sqlalchemy import event
 
        from sqlalchemy.engine import Engine
 

	
 
        log = logging.getLogger('sqlalchemy.engine')
 
        BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = xrange(30, 38)
 
        engine = efc(configuration, prefix, **kwargs)
 

	
 
        def color_sql(sql):
 
            COLOR_SEQ = "\033[1;%dm"
 
            COLOR_SQL = YELLOW
 
            normal = '\x1b[0m'
 
            return ''.join([COLOR_SEQ % COLOR_SQL, sql, normal])
 

	
 
        if configuration['debug']:
 
            #attach events only for debug configuration
 

	
 
            def before_cursor_execute(conn, cursor, statement,
 
                                    parameters, context, executemany):
 
                context._query_start_time = time.time()
 
                log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
 

	
 

	
 
            def after_cursor_execute(conn, cursor, statement,
 
                                    parameters, context, executemany):
 
                total = time.time() - context._query_start_time
 
                log.info(color_sql("<<<<< TOTAL TIME: %f <<<<<" % total))
 

	
 
            event.listen(engine, "before_cursor_execute",
 
                         before_cursor_execute)
 
            event.listen(engine, "after_cursor_execute",
 
                         after_cursor_execute)
 

	
 
    return engine
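# Minimal usage sketch (config keys/values below are assumptions, not app config):
#   conf = {'sqlalchemy.db1.url': 'sqlite:///rhodecode.db', 'debug': False}
#   engine = engine_from_config(conf, prefix='sqlalchemy.db1.')
#   # NullPool is forced for file based sqlite on SQLAlchemy < 0.7; on newer
#   # versions the 'debug' flag only toggles the query-timing listeners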
 

	
 

	
 
def age(curdate):
 
    """
 
    turns a datetime into an age string.
 

	
 
    :param curdate: datetime object
 
    :rtype: unicode
 
    :returns: unicode words describing age
 
    """
 

	
 
    from datetime import datetime
 
    from webhelpers.date import time_ago_in_words
 

	
 
    _ = lambda s: s
 

	
 
    if not curdate:
 
        return ''
 

	
 
    agescales = [(_(u"year"), 3600 * 24 * 365),
 
                 (_(u"month"), 3600 * 24 * 30),
 
                 (_(u"day"), 3600 * 24),
 
                 (_(u"hour"), 3600),
 
                 (_(u"minute"), 60),
 
                 (_(u"second"), 1), ]
 

	
 
    age = datetime.now() - curdate
 
    age_seconds = (age.days * agescales[2][1]) + age.seconds
 
    pos = 1
 
    for scale in agescales:
 
        if scale[1] <= age_seconds:
 
            if pos == 6:
 
                pos = 5
 
            return '%s %s' % (time_ago_in_words(curdate,
 
                                                agescales[pos][0]), _('ago'))
 
        pos += 1
 

	
 
    return _(u'just now')
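# Rough behaviour sketch (the exact wording comes from webhelpers'
# time_ago_in_words, so treat the strings as illustrative):
#   age(datetime.now())                         -> u'just now'
#   age(datetime.now() - timedelta(minutes=5))  -> something like '5 minutes ago'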
 

	
 

	
 
def uri_filter(uri):
 
    """
 
    Removes user:password from given url string
 

	
 
    :param uri:
 
    :rtype: list
 
    :returns: filtered list of strings
 
    """
 
    if not uri:
 
        return ''
 

	
 
    proto = ''
 

	
 
    for pat in ('https://', 'http://'):
 
        if uri.startswith(pat):
 
            uri = uri[len(pat):]
 
            proto = pat
 
            break
 

	
 
    # remove passwords and username
 
    uri = uri[uri.find('@') + 1:]
 

	
 
    # get the port
 
    cred_pos = uri.find(':')
 
    if cred_pos == -1:
 
        host, port = uri, None
 
    else:
 
        host, port = uri[:cred_pos], uri[cred_pos + 1:]
 

	
 
    return filter(None, [proto, host, port])
 

	
 

	
 
def credentials_filter(uri):
 
    """
 
    Returns a url with removed credentials
 

	
 
    :param uri:
 
    """
 

	
 
    uri = uri_filter(uri)
 
    #check if we have port
 
    if len(uri) > 2 and uri[2]:
 
        uri[2] = ':' + uri[2]
 

	
 
    return ''.join(uri)
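# Hedged example of stripping credentials from a clone url:
#   credentials_filter('https://user:secret@server.com:8080/repo')
#   -> 'https://server.com:8080/repo'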
 

	
 

	
 
def get_changeset_safe(repo, rev):
 
    """
 
    Safe version of get_changeset; if this changeset doesn't exist for a
 
    repo it returns a dummy one instead
 

	
 
    :param repo:
 
    :param rev:
 
    """
 
    from rhodecode.lib.vcs.backends.base import BaseRepository
 
    from rhodecode.lib.vcs.exceptions import RepositoryError
 
    if not isinstance(repo, BaseRepository):
 
        raise Exception('You must pass a Repository '
 
                        'object as first argument, got %s' % type(repo))
 

	
 
    try:
 
        cs = repo.get_changeset(rev)
 
    except RepositoryError:
 
        from rhodecode.lib.utils import EmptyChangeset
 
        cs = EmptyChangeset(requested_revision=rev)
 
    return cs
 

	
 

	
 
def get_current_revision(quiet=False):
 
    """
 
    Returns tuple of (number, id) from repository containing this package
 
    or None if repository could not be found.
 

	
 
    :param quiet: suppress the error message for fetching revision if True
 
    """
 

	
 
    try:
 
        from rhodecode.lib.vcs import get_repo
 
        from rhodecode.lib.vcs.utils.helpers import get_scm
 
        repopath = os.path.join(os.path.dirname(__file__), '..', '..')
 
        scm = get_scm(repopath)[0]
 
        repo = get_repo(path=repopath, alias=scm)
 
        tip = repo.get_changeset()
 
        return (tip.revision, tip.short_id)
 
    except Exception, err:
 
        if not quiet:
 
            print ("Cannot retrieve rhodecode's revision. Original error "
 
                   "was: %s" % err)
 
        return None
 

	
 

	
 
def extract_mentioned_users(s):
 
    """
 
    Returns unique usernames from given string s that have @mention
 

	
 
    :param s: string to get mentions
 
    """
 
    usrs = {}
 
    for username in re.findall(r'(?:^@|\s@)(\w+)', s):
 
        usrs[username] = username
 

	
 
    return sorted(usrs.keys())
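# Quick sketch of the @mention extraction above:
#   extract_mentioned_users('thanks @marcin, review with @lukasz @marcin')
#   -> ['lukasz', 'marcin']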
rhodecode/lib/auth.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.auth
 
    ~~~~~~~~~~~~~~~~~~
 

	
 
    authentication and permission libraries
 

	
 
    :created_on: Apr 4, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import random
 
import logging
 
import traceback
 
import hashlib
 

	
 
from tempfile import _RandomNameSequence
 
from decorator import decorator
 

	
 
from pylons import config, url, request
 
from pylons.controllers.util import abort, redirect
 
from pylons.i18n.translation import _
 

	
 
from rhodecode import __platform__, PLATFORM_WIN, PLATFORM_OTHERS
 
from rhodecode.model.meta import Session
 

	
 
if __platform__ in PLATFORM_WIN:
 
    from hashlib import sha256
 
if __platform__ in PLATFORM_OTHERS:
 
    import bcrypt
 

	
 
from rhodecode.lib import str2bool, safe_unicode
 
from rhodecode.lib.utils2 import str2bool, safe_unicode
 
from rhodecode.lib.exceptions import LdapPasswordError, LdapUsernameError
 
from rhodecode.lib.utils import get_repo_slug, get_repos_group_slug
 
from rhodecode.lib.auth_ldap import AuthLdap
 

	
 
from rhodecode.model import meta
 
from rhodecode.model.user import UserModel
 
from rhodecode.model.db import Permission, RhodeCodeSetting, User
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class PasswordGenerator(object):
 
    """
 
    This is a simple class for generating passwords from different sets of
 
    characters
 
    usage::
 

	
 
        passwd_gen = PasswordGenerator()
 
        # print an 8-letter password containing only big and small letters
 
        # of the alphabet
 
        print passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
 
    """
 
    ALPHABETS_NUM = r'''1234567890'''
 
    ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
 
    ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
 
    ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
 
    ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
 
        + ALPHABETS_NUM + ALPHABETS_SPECIAL
 
    ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
 
    ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
 
    ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
 
    ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM
 

	
 
    def __init__(self, passwd=''):
 
        self.passwd = passwd
 

	
 
    def gen_password(self, length, type_=None):
 
        if type_ is None:
 
            type_ = self.ALPHABETS_FULL
 
        self.passwd = ''.join([random.choice(type_) for _ in xrange(length)])
 
        return self.passwd
 

	
 

	
 
class RhodeCodeCrypto(object):
 

	
 
    @classmethod
 
    def hash_string(cls, str_):
 
        """
 
        Cryptographic function used for password hashing based on pybcrypt
 
        or pycrypto in windows
 

	
 
        :param str_: password to hash
 
        """
 
        if __platform__ in PLATFORM_WIN:
 
            return sha256(str_).hexdigest()
 
        elif __platform__ in PLATFORM_OTHERS:
 
            return bcrypt.hashpw(str_, bcrypt.gensalt(10))
 
        else:
 
            raise Exception('Unknown or unsupported platform %s' \
 
                            % __platform__)
 

	
 
    @classmethod
 
    def hash_check(cls, password, hashed):
 
        """
 
        Checks matching password with its hashed value, runs a different
 
        implementation based on the platform it runs on
 

	
 
        :param password: password
 
        :param hashed: password in hashed form
 
        """
 

	
 
        if __platform__ in PLATFORM_WIN:
 
            return sha256(password).hexdigest() == hashed
 
        elif __platform__ in PLATFORM_OTHERS:
 
            return bcrypt.hashpw(password, hashed) == hashed
 
        else:
 
            raise Exception('Unknown or unsupported platform %s' \
 
                            % __platform__)
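    # Round-trip sketch for the two classmethods above (platform branch assumed):
    #   hashed = RhodeCodeCrypto.hash_string('secret')
    #   RhodeCodeCrypto.hash_check('secret', hashed)   # -> True
    #   RhodeCodeCrypto.hash_check('guess', hashed)    # -> False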
 

	
 

	
 
def get_crypt_password(password):
 
    return RhodeCodeCrypto.hash_string(password)
 

	
 

	
 
def check_password(password, hashed):
 
    return RhodeCodeCrypto.hash_check(password, hashed)
 

	
 

	
 
def generate_api_key(str_, salt=None):
 
    """
 
    Generates API KEY from given string
 

	
 
    :param str_:
 
    :param salt:
 
    """
 

	
 
    if salt is None:
 
        salt = _RandomNameSequence().next()
 

	
 
    return hashlib.sha1(str_ + salt).hexdigest()
 

	
 

	
 
def authfunc(environ, username, password):
 
    """
 
    Dummy authentication wrapper function used in Mercurial and Git for
 
    access control.
 

	
 
    :param environ: needed only for using in Basic auth
 
    """
 
    return authenticate(username, password)
 

	
 

	
 
def authenticate(username, password):
 
    """
 
    Authentication function used for access control.
 
    It first checks for db authentication, then, if ldap is enabled, for ldap
 
    authentication; it also creates the ldap user if not present in the database
 

	
 
    :param username: username
 
    :param password: password
 
    """
 

	
 
    user_model = UserModel()
 
    user = User.get_by_username(username)
 

	
 
    log.debug('Authenticating user using RhodeCode account')
 
    if user is not None and not user.ldap_dn:
 
        if user.active:
 
            if user.username == 'default' and user.active:
 
                log.info('user %s authenticated correctly as anonymous user' %
 
                         username)
 
                return True
 

	
 
            elif user.username == username and check_password(password,
 
                                                              user.password):
 
                log.info('user %s authenticated correctly' % username)
 
                return True
 
        else:
 
            log.warning('user %s tried auth but is disabled' % username)
 

	
 
    else:
 
        log.debug('Regular authentication failed')
 
        user_obj = User.get_by_username(username, case_insensitive=True)
 

	
 
        if user_obj is not None and not user_obj.ldap_dn:
 
            log.debug('this user already exists as non ldap')
 
            return False
 

	
 
        ldap_settings = RhodeCodeSetting.get_ldap_settings()
 
        #======================================================================
 
        # FALLBACK TO LDAP AUTH IF ENABLED
 
        #======================================================================
 
        if str2bool(ldap_settings.get('ldap_active')):
 
            log.debug("Authenticating user using ldap")
 
            kwargs = {
 
                  'server': ldap_settings.get('ldap_host', ''),
 
                  'base_dn': ldap_settings.get('ldap_base_dn', ''),
 
                  'port': ldap_settings.get('ldap_port'),
 
                  'bind_dn': ldap_settings.get('ldap_dn_user'),
 
                  'bind_pass': ldap_settings.get('ldap_dn_pass'),
 
                  'tls_kind': ldap_settings.get('ldap_tls_kind'),
 
                  'tls_reqcert': ldap_settings.get('ldap_tls_reqcert'),
 
                  'ldap_filter': ldap_settings.get('ldap_filter'),
 
                  'search_scope': ldap_settings.get('ldap_search_scope'),
 
                  'attr_login': ldap_settings.get('ldap_attr_login'),
 
                  'ldap_version': 3,
 
                  }
 
            log.debug('Checking for ldap authentication')
 
            try:
 
                aldap = AuthLdap(**kwargs)
 
                (user_dn, ldap_attrs) = aldap.authenticate_ldap(username,
 
                                                                password)
 
                log.debug('Got ldap DN response %s' % user_dn)
 

	
 
                get_ldap_attr = lambda k: ldap_attrs.get(ldap_settings\
 
                                                           .get(k), [''])[0]
 

	
 
                user_attrs = {
 
                 'name': safe_unicode(get_ldap_attr('ldap_attr_firstname')),
 
                 'lastname': safe_unicode(get_ldap_attr('ldap_attr_lastname')),
 
                 'email': get_ldap_attr('ldap_attr_email'),
 
                }
 

	
 
                # don't store LDAP password since we don't need it. Override
 
                # with some random generated password
 
                _password = PasswordGenerator().gen_password(length=8)
 
                # create this user on the fly if it doesn't exist in rhodecode
 
                # database
 
                if user_model.create_ldap(username, _password, user_dn,
 
                                          user_attrs):
 
                    log.info('created new ldap user %s' % username)
 

	
 
                Session.commit()
 
                return True
 
            except (LdapUsernameError, LdapPasswordError,):
 
                pass
 
            except (Exception,):
 
                log.error(traceback.format_exc())
 
                pass
 
    return False
 

	
 

	
 
def login_container_auth(username):
 
    user = User.get_by_username(username)
 
    if user is None:
 
        user_attrs = {
 
            'name': username,
 
            'lastname': None,
 
            'email': None,
 
        }
 
        user = UserModel().create_for_container_auth(username, user_attrs)
 
        if not user:
 
            return None
 
        log.info('User %s was created by container authentication' % username)
 

	
 
    if not user.active:
 
        return None
 

	
 
    user.update_lastlogin()
 
    Session.commit()
 

	
 
    log.debug('User %s is now logged in by container authentication',
 
              user.username)
 
    return user
 

	
 

	
 
def get_container_username(environ, config):
 
    username = None
 

	
 
    if str2bool(config.get('container_auth_enabled', False)):
 
        from paste.httpheaders import REMOTE_USER
 
        username = REMOTE_USER(environ)
 

	
 
    if not username and str2bool(config.get('proxypass_auth_enabled', False)):
 
        username = environ.get('HTTP_X_FORWARDED_USER')
 

	
 
    if username:
 
        # Removing realm and domain from username
 
        username = username.partition('@')[0]
 
        username = username.rpartition('\\')[2]
 
        log.debug('Received username %s from container' % username)
 

	
 
    return username
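# Hedged example: a proxy-pass header reduced to a bare username
# (environ/config values below are illustrative only):
#   environ = {'HTTP_X_FORWARDED_USER': 'DOMAIN\\joe@EXAMPLE.ORG'}
#   config = {'proxypass_auth_enabled': 'true'}
#   get_container_username(environ, config)   # -> 'joe'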
 

	
 

	
 
class CookieStoreWrapper(object):
 

	
 
    def __init__(self, cookie_store):
 
        self.cookie_store = cookie_store
 

	
 
    def __repr__(self):
 
        return 'CookieStore<%s>' % (self.cookie_store)
 

	
 
    def get(self, key, other=None):
 
        if isinstance(self.cookie_store, dict):
 
            return self.cookie_store.get(key, other)
 
        elif isinstance(self.cookie_store, AuthUser):
 
            return self.cookie_store.__dict__.get(key, other)
 

	
 

	
 
class  AuthUser(object):
 
    """
 
    A simple object that handles all attributes of user in RhodeCode
 

	
 
    It does lookup based on API key, given user, or user present in session.
 
    Then it fills all required information for such user. It also checks if
 
    anonymous access is enabled and if so, it returns default user as logged
 
    in
 
    """
 

	
 
    def __init__(self, user_id=None, api_key=None, username=None):
 

	
 
        self.user_id = user_id
 
        self.api_key = None
 
        self.username = username
 

	
 
        self.name = ''
 
        self.lastname = ''
 
        self.email = ''
 
        self.is_authenticated = False
 
        self.admin = False
 
        self.permissions = {}
 
        self._api_key = api_key
 
        self.propagate_data()
 
        self._instance = None
 

	
 
    def propagate_data(self):
 
        user_model = UserModel()
 
        self.anonymous_user = User.get_by_username('default', cache=True)
 
        is_user_loaded = False
 

	
 
        # try to get user by api key
 
        if self._api_key and self._api_key != self.anonymous_user.api_key:
 
            log.debug('Auth User lookup by API KEY %s' % self._api_key)
 
            is_user_loaded = user_model.fill_data(self, api_key=self._api_key)
 
        # lookup by userid
 
        elif (self.user_id is not None and
 
              self.user_id != self.anonymous_user.user_id):
 
            log.debug('Auth User lookup by USER ID %s' % self.user_id)
 
            is_user_loaded = user_model.fill_data(self, user_id=self.user_id)
 
        # lookup by username
 
        elif self.username and \
 
            str2bool(config.get('container_auth_enabled', False)):
 

	
 
            log.debug('Auth User lookup by USER NAME %s' % self.username)
 
            dbuser = login_container_auth(self.username)
 
            if dbuser is not None:
 
                for k, v in dbuser.get_dict().items():
 
                    setattr(self, k, v)
 
                self.set_authenticated()
 
                is_user_loaded = True
 
        else:
 
            log.debug('No data in %s that could have been used to log in' % self)
 

	
 
        if not is_user_loaded:
 
            # if we cannot authenticate user try anonymous
 
            if self.anonymous_user.active is True:
 
                user_model.fill_data(self, user_id=self.anonymous_user.user_id)
 
                # then we mark this user as logged in
 
                self.is_authenticated = True
 
            else:
 
                self.user_id = None
 
                self.username = None
 
                self.is_authenticated = False
 

	
 
        if not self.username:
 
            self.username = 'None'
 

	
 
        log.debug('Auth User is now %s' % self)
 
        user_model.fill_perms(self)
 

	
 
    @property
 
    def is_admin(self):
 
        return self.admin
 

	
 
    def __repr__(self):
 
        return "<AuthUser('id:%s:%s|%s')>" % (self.user_id, self.username,
 
                                              self.is_authenticated)
 

	
 
    def set_authenticated(self, authenticated=True):
 
        if self.user_id != self.anonymous_user.user_id:
 
            self.is_authenticated = authenticated
 

	
 
    def get_cookie_store(self):
 
        return {'username': self.username,
 
                'user_id': self.user_id,
 
                'is_authenticated': self.is_authenticated}
 

	
 
    @classmethod
 
    def from_cookie_store(cls, cookie_store):
 
        """
 
        Creates AuthUser from a cookie store
 

	
 
        :param cls:
 
        :param cookie_store:
 
        """
 
        user_id = cookie_store.get('user_id')
 
        username = cookie_store.get('username')
 
        api_key = cookie_store.get('api_key')
 
        return AuthUser(user_id, api_key, username)
 

	
 

	
 
def set_available_permissions(config):
 
    """
 
    This function will populate the pylons globals with all available
 
    permissions defined in the db. We don't want to check the db each time for
 
    new permissions, since adding a new permission also requires an application
 
    restart, i.e. to decorate new views with the newly created permission
 

	
 
    :param config: current pylons config instance
 

	
 
    """
 
    log.info('getting information about all available permissions')
 
    all_perms = []
 
    try:
 
        sa = meta.Session
 
        all_perms = sa.query(Permission).all()
 
    except Exception:
 
        pass
 
    finally:
 
        meta.Session.remove()
 

	
 
    config['available_permissions'] = [x.permission_name for x in all_perms]
 

	
 

	
 
#==============================================================================
 
# CHECK DECORATORS
 
#==============================================================================
 
class LoginRequired(object):
 
    """
 
    Must be logged in to execute this function else
 
    redirect to login page
 

	
 
    :param api_access: if enabled this checks only for valid auth token
 
        and grants access based on valid token
 
    """
 

	
 
    def __init__(self, api_access=False):
 
        self.api_access = api_access
 

	
 
    def __call__(self, func):
 
        return decorator(self.__wrapper, func)
 

	
 
    def __wrapper(self, func, *fargs, **fkwargs):
 
        cls = fargs[0]
 
        user = cls.rhodecode_user
 

	
 
        api_access_ok = False
 
        if self.api_access:
 
            log.debug('Checking API KEY access for %s' % cls)
 
            if user.api_key == request.GET.get('api_key'):
 
                api_access_ok = True
 
            else:
 
                log.debug("API KEY token not valid")
 
        loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
 
        log.debug('Checking if %s is authenticated @ %s' % (user.username, loc))
 
        if user.is_authenticated or api_access_ok:
 
            log.info('user %s is authenticated and granted access to %s' % (
 
                       user.username, loc)
 
            )
 
            return func(*fargs, **fkwargs)
 
        else:
 
            log.warn('user %s NOT authenticated on func: %s' % (
 
                user, loc)
 
            )
 
            p = url.current()
 

	
 
            log.debug('redirecting to login page with %s' % p)
 
            return redirect(url('login_home', came_from=p))
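    # Typical controller usage sketch (the method name is illustrative):
    #   @LoginRequired(api_access=True)
    #   def my_json_action(self):
    #       ...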
 

	
 

	
 
class NotAnonymous(object):
 
    """
 
    Must be logged in to execute this function else
 
    redirect to login page"""
 

	
 
    def __call__(self, func):
 
        return decorator(self.__wrapper, func)
 

	
 
    def __wrapper(self, func, *fargs, **fkwargs):
 
        cls = fargs[0]
 
        self.user = cls.rhodecode_user
 

	
 
        log.debug('Checking if user is not anonymous @%s' % cls)
 

	
 
        anonymous = self.user.username == 'default'
 

	
 
        if anonymous:
 
            p = url.current()
 

	
 
            import rhodecode.lib.helpers as h
 
            h.flash(_('You need to be a registered user to '
 
                      'perform this action'),
 
                    category='warning')
 
            return redirect(url('login_home', came_from=p))
 
        else:
 
            return func(*fargs, **fkwargs)
 

	
 

	
 
class PermsDecorator(object):
 
    """Base class for controller decorators"""
 

	
 
    def __init__(self, *required_perms):
 
        available_perms = config['available_permissions']
 
        for perm in required_perms:
 
            if perm not in available_perms:
 
                raise Exception("'%s' permission is not defined" % perm)
 
        self.required_perms = set(required_perms)
 
        self.user_perms = None
 

	
 
    def __call__(self, func):
 
        return decorator(self.__wrapper, func)
 

	
 
    def __wrapper(self, func, *fargs, **fkwargs):
 
        cls = fargs[0]
 
        self.user = cls.rhodecode_user
 
        self.user_perms = self.user.permissions
 
        log.debug('checking %s permissions %s for %s %s',
 
                  self.__class__.__name__, self.required_perms, cls,
 
                  self.user)
 

	
 
        if self.check_permissions():
 
            log.debug('Permission granted for %s %s' % (cls, self.user))
 
            return func(*fargs, **fkwargs)
 

	
 
        else:
 
            log.debug('Permission denied for %s %s' % (cls, self.user))
 
            anonymous = self.user.username == 'default'
 

	
 
            if anonymous:
 
                p = url.current()
 

	
 
                import rhodecode.lib.helpers as h
 
                h.flash(_('You need to be signed in to '
 
                          'view this page'),
 
                        category='warning')
 
                return redirect(url('login_home', came_from=p))
 

	
 
            else:
 
                # redirect with forbidden ret code
 
                return abort(403)
 

	
 
    def check_permissions(self):
 
        """Dummy function for overriding"""
 
        raise Exception('You have to write this function in child class')
 

	
 

	
 
class HasPermissionAllDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for all given predicates. All of them
 
    have to be met in order to fulfill the request
 
    """
 

	
 
    def check_permissions(self):
 
        if self.required_perms.issubset(self.user_perms.get('global')):
 
            return True
 
        return False
 

	
 

	
 
class HasPermissionAnyDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for any of given predicates. In order to
 
    fulfill the request any of the predicates must be met
 
    """
 

	
 
    def check_permissions(self):
 
        if self.required_perms.intersection(self.user_perms.get('global')):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoPermissionAllDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for all given predicates for specific
 
    repository. All of them have to be met in order to fulfill the request
 
    """
 

	
 
    def check_permissions(self):
 
        repo_name = get_repo_slug(request)
 
        try:
 
            user_perms = set([self.user_perms['repositories'][repo_name]])
 
        except KeyError:
 
            return False
 
        if self.required_perms.issubset(user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoPermissionAnyDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for any of given predicates for specific
 
    repository. In order to fulfill the request any of the predicates must be met
 
    """
 

	
 
    def check_permissions(self):
 
        repo_name = get_repo_slug(request)
 

	
 
        try:
 
            user_perms = set([self.user_perms['repositories'][repo_name]])
 
        except KeyError:
 
            return False
 
        if self.required_perms.intersection(user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasReposGroupPermissionAllDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for all given predicates for specific
 
    repository. All of them have to be met in order to fulfill the request
 
    """
 

	
 
    def check_permissions(self):
 
        group_name = get_repos_group_slug(request)
 
        try:
 
            user_perms = set([self.user_perms['repositories_groups'][group_name]])
 
        except KeyError:
 
            return False
 
        if self.required_perms.issubset(user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasReposGroupPermissionAnyDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for any of given predicates for specific
 
    repository. In order to fulfill the request any of the predicates must be met
 
    """
 

	
 
    def check_permissions(self):
 
        group_name = get_repos_group_slug(request)
 

	
 
        try:
 
            user_perms = set([self.user_perms['repositories_groups'][group_name]])
 
        except KeyError:
 
            return False
 
        if self.required_perms.intersection(user_perms):
 
            return True
 
        return False
 

	
 

	
 
#==============================================================================
 
# CHECK FUNCTIONS
 
#==============================================================================
 
class PermsFunction(object):
 
    """Base function for other check functions"""
 

	
 
    def __init__(self, *perms):
 
        available_perms = config['available_permissions']
 

	
 
        for perm in perms:
 
            if perm not in available_perms:
 
                raise Exception("'%s' permission is not defined" % perm)
 
        self.required_perms = set(perms)
 
        self.user_perms = None
 
        self.granted_for = ''
 
        self.repo_name = None
 

	
 
    def __call__(self, check_Location=''):
 
        user = request.user
 
        log.debug('checking %s %s %s', self.__class__.__name__,
 
                  self.required_perms, user)
 
        if not user:
 
            log.debug('Empty request user')
 
            return False
 
        self.user_perms = user.permissions
 
        self.granted_for = user
 

	
 
        if self.check_permissions():
 
            log.debug('Permission granted %s @ %s', self.granted_for,
 
                      check_Location or 'unspecified location')
 
            return True
 

	
 
        else:
 
            log.debug('Permission denied for %s @ %s', self.granted_for,
 
                        check_Location or 'unspecified location')
 
            return False
 

	
 
    def check_permissions(self):
 
        """Dummy function for overriding"""
 
        raise Exception('You have to write this function in child class')
 

	
 

	
 
class HasPermissionAll(PermsFunction):
 
    def check_permissions(self):
 
        if self.required_perms.issubset(self.user_perms.get('global')):
 
            return True
 
        return False
 

	
 

	
 
class HasPermissionAny(PermsFunction):
 
    def check_permissions(self):
 
        if self.required_perms.intersection(self.user_perms.get('global')):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoPermissionAll(PermsFunction):
 

	
 
    def __call__(self, repo_name=None, check_Location=''):
 
        self.repo_name = repo_name
 
        return super(HasRepoPermissionAll, self).__call__(check_Location)
 

	
 
    def check_permissions(self):
 
        if not self.repo_name:
 
            self.repo_name = get_repo_slug(request)
 

	
 
        try:
 
            self.user_perms = set(
 
                [self.user_perms['repositories'][self.repo_name]]
 
            )
 
        except KeyError:
 
            return False
 
        self.granted_for = self.repo_name
 
        if self.required_perms.issubset(self.user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoPermissionAny(PermsFunction):
 

	
 
    def __call__(self, repo_name=None, check_Location=''):
 
        self.repo_name = repo_name
 
        return super(HasRepoPermissionAny, self).__call__(check_Location)
 

	
 
    def check_permissions(self):
 
        if not self.repo_name:
 
            self.repo_name = get_repo_slug(request)
 

	
 
        try:
 
            self.user_perms = set(
 
                [self.user_perms['repositories'][self.repo_name]]
 
            )
 
        except KeyError:
 
            return False
 
        self.granted_for = self.repo_name
 
        if self.required_perms.intersection(self.user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasReposGroupPermissionAny(PermsFunction):
 
    def __call__(self, group_name=None, check_Location=''):
 
        self.group_name = group_name
 
        return super(HasReposGroupPermissionAny, self).__call__(check_Location)
 

	
 
    def check_permissions(self):
 
        try:
 
            self.user_perms = set(
 
                [self.user_perms['repositories_groups'][self.group_name]]
 
            )
 
        except KeyError:
 
            return False
 
        self.granted_for = self.repo_name
 
        if self.required_perms.intersection(self.user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasReposGroupPermissionAll(PermsFunction):
 
    def __call__(self, group_name=None, check_Location=''):
 
        self.group_name = group_name
 
        return super(HasReposGroupPermissionAll, self).__call__(check_Location)
 

	
 
    def check_permissions(self):
 
        try:
 
            self.user_perms = set(
 
                [self.user_perms['repositories_groups'][self.group_name]]
 
            )
 
        except KeyError:
 
            return False
 
        self.granted_for = self.repo_name
 
        if self.required_perms.issubset(self.user_perms):
 
            return True
 
        return False
 

	
 

	
 
#==============================================================================
 
# SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
 
#==============================================================================
 
class HasPermissionAnyMiddleware(object):
 
    def __init__(self, *perms):
 
        self.required_perms = set(perms)
 

	
 
    def __call__(self, user, repo_name):
 
        # repo_name MUST be unicode, since we handle keys in permission
 
        # dict by unicode
 
        repo_name = safe_unicode(repo_name)
 
        usr = AuthUser(user.user_id)
 
        try:
 
            self.user_perms = set([usr.permissions['repositories'][repo_name]])
 
        except Exception:
 
            log.error('Exception while accessing permissions %s' % 
 
            log.error('Exception while accessing permissions %s' %
 
                      traceback.format_exc())
 
            self.user_perms = set()
 
        self.granted_for = ''
 
        self.username = user.username
 
        self.repo_name = repo_name
 
        return self.check_permissions()
 

	
 
    def check_permissions(self):
 
        log.debug('checking mercurial protocol '
 
                  'permissions %s for user:%s repository:%s', self.user_perms,
 
                                                self.username, self.repo_name)
 
        if self.required_perms.intersection(self.user_perms):
 
            log.debug('permission granted')
 
            return True
 
        log.debug('permission denied')
 
        return False
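    # Hedged usage sketch for the middleware check above (permission names are
    # the ones used elsewhere in RhodeCode):
    #   checker = HasPermissionAnyMiddleware('repository.write',
    #                                        'repository.admin')
    #   checker(user, u'my-repo')   # True only if the user holds one of them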
rhodecode/lib/base.py
Show inline comments
 
"""The base Controller API
 

	
 
Provides the BaseController class for subclassing.
 
"""
 
import logging
 
import time
 
import traceback
 

	
 
from paste.auth.basic import AuthBasicAuthenticator
 

	
 
from pylons import config, tmpl_context as c, request, session, url
 
from pylons.controllers import WSGIController
 
from pylons.controllers.util import redirect
 
from pylons.templating import render_mako as render
 

	
 
from rhodecode import __version__, BACKENDS
 

	
 
from rhodecode.lib import str2bool, safe_unicode
 
from rhodecode.lib.utils2 import str2bool, safe_unicode
 
from rhodecode.lib.auth import AuthUser, get_container_username, authfunc,\
 
    HasPermissionAnyMiddleware, CookieStoreWrapper
 
from rhodecode.lib.utils import get_repo_slug, invalidate_cache
 
from rhodecode.model import meta
 

	
 
from rhodecode.model.db import Repository
 
from rhodecode.model.notification import NotificationModel
 
from rhodecode.model.scm import ScmModel
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class BaseVCSController(object):
 

	
 
    def __init__(self, application, config):
 
        self.application = application
 
        self.config = config
 
        # base path of repo locations
 
        self.basepath = self.config['base_path']
 
        #authenticate this mercurial request using authfunc
 
        self.authenticate = AuthBasicAuthenticator('', authfunc)
 
        self.ipaddr = '0.0.0.0'
 

	
 
    def _handle_request(self, environ, start_response):
 
        raise NotImplementedError()
 

	
 
    def _get_by_id(self, repo_name):
 
        """
 
        Gets a special pattern _<ID> from the clone url and tries to replace it
 
        with a repository_name, for support of _<ID> non-changeable urls
 

	
 
        :param repo_name:
 
        """
 
        try:
 
            data = repo_name.split('/')
 
            if len(data) >= 2:
 
                by_id = data[1].split('_')
 
                if len(by_id) == 2 and by_id[1].isdigit():
 
                    _repo_name = Repository.get(by_id[1]).repo_name
 
                    data[1] = _repo_name
 
        except:
 
            log.debug('Failed to extract repo_name from id %s' % (
 
                      traceback.format_exc()
 
                      )
 
            )
 

	
 
        return '/'.join(data)
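    # Hedged trace of the _<ID> handling (assuming repo id 3 resolves to
    # 'my-repo'):
    #   _get_by_id('/_3')       -> '/my-repo'
    #   _get_by_id('/my-repo')  -> '/my-repo'  (no _<ID> pattern, untouched)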
 

	
 
    def _invalidate_cache(self, repo_name):
 
        """
 
        Sets cache for this repository for invalidation on next access
 

	
 
        :param repo_name: full repo name, also a cache key
 
        """
 
        invalidate_cache('get_repo_cached_%s' % repo_name)
 

	
 
    def _check_permission(self, action, user, repo_name):
 
        """
 
        Checks permissions using action (push/pull) user and repository
 
        name
 

	
 
        :param action: push or pull action
 
        :param user: user instance
 
        :param repo_name: repository name
 
        """
 
        if action == 'push':
 
            if not HasPermissionAnyMiddleware('repository.write',
 
                                              'repository.admin')(user,
 
                                                                  repo_name):
 
                return False
 

	
 
        else:
 
            #any other action need at least read permission
 
            if not HasPermissionAnyMiddleware('repository.read',
 
                                              'repository.write',
 
                                              'repository.admin')(user,
 
                                                                  repo_name):
 
                return False
 

	
 
        return True
 

	
 
    def __call__(self, environ, start_response):
 
        start = time.time()
 
        try:
 
            return self._handle_request(environ, start_response)
 
        finally:
 
            log = logging.getLogger('rhodecode.' + self.__class__.__name__)
 
            log.debug('Request time: %.3fs' % (time.time() - start))
 
            meta.Session.remove()
 

	
 

	
 
class BaseController(WSGIController):
 

	
 
    def __before__(self):
 
        c.rhodecode_version = __version__
 
        c.rhodecode_instanceid = config.get('instance_id')
 
        c.rhodecode_name = config.get('rhodecode_title')
 
        c.use_gravatar = str2bool(config.get('use_gravatar'))
 
        c.ga_code = config.get('rhodecode_ga_code')
 
        c.repo_name = get_repo_slug(request)
 
        c.backends = BACKENDS.keys()
 
        c.unread_notifications = NotificationModel()\
 
                        .get_unread_cnt_for_user(c.rhodecode_user.user_id)
 
        self.cut_off_limit = int(config.get('cut_off_limit'))
 

	
 
        self.sa = meta.Session
 
        self.scm_model = ScmModel(self.sa)
 

	
 
    def __call__(self, environ, start_response):
 
        """Invoke the Controller"""
 
        # WSGIController.__call__ dispatches to the Controller method
 
        # the request is routed to. This routing information is
 
        # available in environ['pylons.routes_dict']
 
        start = time.time()
 
        try:
 
            # make sure that we update permissions each time we call controller
 
            api_key = request.GET.get('api_key')
 
            cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
 
            user_id = cookie_store.get('user_id', None)
 
            username = get_container_username(environ, config)
 
            auth_user = AuthUser(user_id, api_key, username)
 
            request.user = auth_user
 
            self.rhodecode_user = c.rhodecode_user = auth_user
 
            if not self.rhodecode_user.is_authenticated and \
 
                       self.rhodecode_user.user_id is not None:
 
                self.rhodecode_user.set_authenticated(
 
                    cookie_store.get('is_authenticated')
 
                )
 
            log.info('User: %s accessed %s' % (
 
                auth_user, safe_unicode(environ.get('PATH_INFO')))
 
            )
 
            return WSGIController.__call__(self, environ, start_response)
 
        finally:
 
            log.info('Request to %s time: %.3fs' % (
 
                safe_unicode(environ.get('PATH_INFO')), time.time() - start)
 
            )
 
            meta.Session.remove()
 

	
 

	
 
class BaseRepoController(BaseController):
 
    """
 
    Base class for controllers responsible for loading all the data needed for
 
    a repository. Loaded items are:
 

	
 
    c.rhodecode_repo: instance of scm repository
 
    c.rhodecode_db_repo: instance of db
 
    c.repository_followers: number of followers
 
    c.repository_forks: number of forks
 
    """
 

	
 
    def __before__(self):
 
        super(BaseRepoController, self).__before__()
 
        if c.repo_name:
 

	
 
            c.rhodecode_db_repo = Repository.get_by_repo_name(c.repo_name)
 
            c.rhodecode_repo = c.rhodecode_db_repo.scm_instance
 

	
 
            if c.rhodecode_repo is None:
 
                log.error('%s this repository is present in database but it '
 
                          'cannot be created as an scm instance', c.repo_name)
 

	
 
                redirect(url('home'))
 

	
 
            c.repository_followers = self.scm_model.get_followers(c.repo_name)
 
            c.repository_forks = self.scm_model.get_forks(c.repo_name)
rhodecode/lib/caching_query.py
Show inline comments
 
"""caching_query.py
 

	
 
Represent persistence structures which allow the usage of
 
Beaker caching with SQLAlchemy.
 

	
 
The three new concepts introduced here are:
 

	
 
 * CachingQuery - a Query subclass that caches and
 
   retrieves results in/from Beaker.
 
 * FromCache - a query option that establishes caching
 
   parameters on a Query
 
 * RelationshipCache - a variant of FromCache which is specific
 
   to a query invoked during a lazy load.
 
 * _params_from_query - extracts value parameters from
 
   a Query.
 

	
 
The rest of what's here are standard SQLAlchemy and
 
Beaker constructs.
 

	
 
"""
 
import beaker
 
from beaker.exceptions import BeakerException
 

	
 
from sqlalchemy.orm.interfaces import MapperOption
 
from sqlalchemy.orm.query import Query
 
from sqlalchemy.sql import visitors
 
from rhodecode.lib import safe_str
 
from rhodecode.lib.utils2 import safe_str
 

	
 

	
 
class CachingQuery(Query):
 
    """A Query subclass which optionally loads full results from a Beaker
 
    cache region.
 

	
 
    The CachingQuery stores additional state that allows it to consult
 
    a Beaker cache before accessing the database:
 

	
 
    * A "region", which is a cache region argument passed to a
 
      Beaker CacheManager, specifies a particular cache configuration
 
      (including backend implementation, expiration times, etc.)
 
    * A "namespace", which is a qualifying name that identifies a
 
      group of keys within the cache.  A query that filters on a name
 
      might use the name "by_name", a query that filters on a date range
 
      to a joined table might use the name "related_date_range".
 

	
 
    When the above state is present, a Beaker cache is retrieved.
 

	
 
    The "namespace" name is first concatenated with
 
    a string composed of the individual entities and columns the Query
 
    requests, i.e. such as ``Query(User.id, User.name)``.
 

	
 
    The Beaker cache is then loaded from the cache manager based
 
    on the region and composed namespace.  The key within the cache
 
    itself is then constructed against the bind parameters specified
 
    by this query, which are usually literals defined in the
 
    WHERE clause.
 

	
 
    The FromCache and RelationshipCache mapper options below represent
 
    the "public" method of configuring this state upon the CachingQuery.
 

	
 
    """
 

	
 
    def __init__(self, manager, *args, **kw):
 
        self.cache_manager = manager
 
        Query.__init__(self, *args, **kw)
 

	
 
    def __iter__(self):
 
        """override __iter__ to pull results from Beaker
 
           if particular attributes have been configured.
 

	
 
           Note that this approach does *not* detach the loaded objects from
 
           the current session. If the cache backend is an in-process cache
 
           (like "memory") and lives beyond the scope of the current session's
 
           transaction, those objects may be expired. The method here can be
 
           modified to first expunge() each loaded item from the current
 
           session before returning the list of items, so that the items
 
           in the cache are not the same ones in the current Session.
 

	
 
        """
 
        if hasattr(self, '_cache_parameters'):
 
            return self.get_value(createfunc=lambda:
 
                                  list(Query.__iter__(self)))
 
        else:
 
            return Query.__iter__(self)
 

	
 
    def invalidate(self):
 
        """Invalidate the value represented by this Query."""
 

	
 
        cache, cache_key = _get_cache_parameters(self)
 
        cache.remove(cache_key)
 

	
 
    def get_value(self, merge=True, createfunc=None):
 
        """Return the value from the cache for this query.
 

	
 
        Raise KeyError if no value present and no
 
        createfunc specified.
 

	
 
        """
 
        cache, cache_key = _get_cache_parameters(self)
 
        ret = cache.get_value(cache_key, createfunc=createfunc)
 
        if merge:
 
            ret = self.merge_result(ret, load=False)
 
        return ret
 

	
 
    def set_value(self, value):
 
        """Set the value in the cache for this query."""
 

	
 
        cache, cache_key = _get_cache_parameters(self)
 
        cache.put(cache_key, value)
 

	
 

	
 
def query_callable(manager, query_cls=CachingQuery):
 
    def query(*arg, **kw):
 
        return query_cls(manager, *arg, **kw)
 
    return query
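# A minimal wiring sketch (an assumption, not part of this changeset): the
# callable returned above is meant to be passed as ``query_cls`` to SQLAlchemy's
# sessionmaker, so that every Query produced by the Session is a CachingQuery.
# ``cache_manager`` is a placeholder name for a configured beaker CacheManager.
#
#   from beaker import cache
#   from sqlalchemy.orm import scoped_session, sessionmaker
#
#   cache_manager = cache.CacheManager()
#   Session = scoped_session(
#       sessionmaker(query_cls=query_callable(cache_manager))
#   )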
 

	
 

	
 
def get_cache_region(name, region):
 
    if region not in beaker.cache.cache_regions:
 
        raise BeakerException('Cache region `%s` not configured. '
 
            'Check if proper cache settings are in the .ini files' % region)
 
    kw = beaker.cache.cache_regions[region]
 
    return beaker.cache.Cache._get_cache(name, kw)
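# An illustrative .ini sketch (an assumption about the expected configuration):
# a region name passed to get_cache_region() has to be declared in beaker's
# cache settings, otherwise the BeakerException above is raised, e.g.:
#
#   beaker.cache.regions = sql_cache_short, sql_cache_long
#   beaker.cache.sql_cache_short.type = memory
#   beaker.cache.sql_cache_short.expire = 30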
 

	
 

	
 
def _get_cache_parameters(query):
 
    """For a query with cache_region and cache_namespace configured,
 
    return the corresponding Cache instance and cache key, based
 
    on this query's current criterion and parameter values.
 

	
 
    """
 
    if not hasattr(query, '_cache_parameters'):
 
        raise ValueError("This Query does not have caching "
 
                         "parameters configured.")
 

	
 
    region, namespace, cache_key = query._cache_parameters
 

	
 
    namespace = _namespace_from_query(namespace, query)
 

	
 
    if cache_key is None:
 
        # cache key - the value arguments from this query's parameters.
 
        args = [safe_str(x) for x in _params_from_query(query)]
 
        args.extend(filter(lambda k: k not in ['None', None, u'None'],
 
                           [str(query._limit), str(query._offset)]))
 

	
 
        cache_key = " ".join(args)
 

	
 
    if cache_key is None:
 
        raise Exception('Cache key cannot be None')
 

	
 
    # get cache
 
    #cache = query.cache_manager.get_cache_region(namespace, region)
 
    cache = get_cache_region(namespace, region)
 
    # optional - hash the cache_key too for consistent length
 
    # import uuid
 
    # cache_key= str(uuid.uuid5(uuid.NAMESPACE_DNS, cache_key))
 

	
 
    return cache, cache_key
 

	
 

	
 
def _namespace_from_query(namespace, query):
 
    # cache namespace - the token handed in by the
 
    # option + class we're querying against
 
    namespace = " ".join([namespace] + [str(x) for x in query._entities])
 

	
 
    # memcached wants this
 
    namespace = namespace.replace(' ', '_')
 

	
 
    return namespace
 

	
 

	
 
def _set_cache_parameters(query, region, namespace, cache_key):
 

	
 
    if hasattr(query, '_cache_parameters'):
 
        region, namespace, cache_key = query._cache_parameters
 
        raise ValueError("This query is already configured "
 
                        "for region %r namespace %r" %
 
                        (region, namespace)
 
                    )
 
    query._cache_parameters = region, namespace, cache_key
 

	
 

	
 
class FromCache(MapperOption):
 
    """Specifies that a Query should load results from a cache."""
 

	
 
    propagate_to_loaders = False
 

	
 
    def __init__(self, region, namespace, cache_key=None):
 
        """Construct a new FromCache.
 

	
 
        :param region: the cache region.  Should be a
 
        region configured in the Beaker CacheManager.
 

	
 
        :param namespace: the cache namespace.  Should
 
        be a name uniquely describing the target Query's
 
        lexical structure.
 

	
 
        :param cache_key: optional.  A string cache key
 
        that will serve as the key to the query.   Use this
 
        if your query has a huge amount of parameters (such
 
        as when using in_()) which correspond more simply to
 
        some other identifier.
 

	
 
        """
 
        self.region = region
 
        self.namespace = namespace
 
        self.cache_key = cache_key
 

	
 
    def process_query(self, query):
 
        """Process a Query during normal loading operation."""
 

	
 
        _set_cache_parameters(query, self.region, self.namespace,
 
                              self.cache_key)
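    # An illustrative usage sketch (an assumption, not part of this changeset):
    # with a Session whose query_cls is CachingQuery, a query result can be
    # cached in a configured region such as "sql_cache_short":
    #
    #   q = Session.query(User).options(FromCache("sql_cache_short", "get_user"))
    #   user = q.filter(User.username == 'admin').scalar()
    #   q.invalidate()  # drop the cached value once the underlying row changes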
 

	
 

	
 
class RelationshipCache(MapperOption):
 
    """Specifies that a Query as called within a "lazy load"
 
       should load results from a cache."""
 

	
 
    propagate_to_loaders = True
 

	
 
    def __init__(self, region, namespace, attribute):
 
        """Construct a new RelationshipCache.
 

	
 
        :param region: the cache region.  Should be a
 
        region configured in the Beaker CacheManager.
 

	
 
        :param namespace: the cache namespace.  Should
 
        be a name uniquely describing the target Query's
 
        lexical structure.
 

	
 
        :param attribute: A Class.attribute which
 
        indicates a particular class relationship() whose
 
        lazy loader should be pulled from the cache.
 

	
 
        """
 
        self.region = region
 
        self.namespace = namespace
 
        self._relationship_options = {
 
            (attribute.property.parent.class_, attribute.property.key): self
 
        }
 

	
 
    def process_query_conditionally(self, query):
 
        """Process a Query that is used within a lazy loader.
 

	
 
        (the process_query_conditionally() method is a SQLAlchemy
 
        hook invoked only within lazyload.)
 

	
 
        """
 
        if query._current_path:
 
            mapper, key = query._current_path[-2:]
 

	
 
            for cls in mapper.class_.__mro__:
 
                if (cls, key) in self._relationship_options:
 
                    relationship_option = \
 
                        self._relationship_options[(cls, key)]
 
                    _set_cache_parameters(
 
                            query,
 
                            relationship_option.region,
 
                            relationship_option.namespace,
 
                            None)
 

	
 
    def and_(self, option):
 
        """Chain another RelationshipCache option to this one.
 

	
 
        While many RelationshipCache objects can be specified on a single
 
        Query separately, chaining them together allows for a more efficient
 
        lookup during load.
 

	
 
        """
 
        self._relationship_options.update(option._relationship_options)
 
        return self
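    # An illustrative usage sketch (an assumption, not part of this changeset):
    # caching the lazy loads of two relationships with one chained option; the
    # attributes below stand in for real mapped relationship() attributes.
    #
    #   cache_rels = RelationshipCache("sql_cache_short", "repo_rels",
    #                                  Repository.user).and_(
    #                RelationshipCache("sql_cache_short", "repo_rels",
    #                                  Repository.group))
    #   repo = Session.query(Repository).options(cache_rels).first()
    #   repo.user  # lazy load is answered from the Beaker cache when present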
 

	
 

	
 
def _params_from_query(query):
 
    """Pull the bind parameter values from a query.
 

	
 
    This takes into account any scalar attribute bindparam set up.
 

	
 
    E.g. params_from_query(query.filter(Cls.foo==5).filter(Cls.bar==7))
 
    would return [5, 7].
 

	
 
    """
 
    v = []
 
    def visit_bindparam(bind):
 

	
 
        if bind.key in query._params:
 
            value = query._params[bind.key]
 
        elif bind.callable:
 
            # lazyloader may dig a callable in here, intended
 
            # to late-evaluate params after autoflush is called.
 
            # convert to a scalar value.
 
            value = bind.callable()
 
        else:
 
            value = bind.value
 

	
 
        v.append(value)
 
    if query._criterion is not None:
 
        visitors.traverse(query._criterion, {}, {'bindparam':visit_bindparam})
 
    for f in query._from_obj:
 
        visitors.traverse(f, {}, {'bindparam':visit_bindparam})
 
    return v
rhodecode/lib/celerylib/__init__.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.celerylib.__init__
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    celery libs for RhodeCode
 

	
 
    :created_on: Nov 27, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import os
 
import sys
 
import socket
 
import traceback
 
import logging
 
from os.path import dirname as dn, join as jn
 
from pylons import config
 

	
 
from hashlib import md5
 
from decorator import decorator
 

	
 
from rhodecode.lib.vcs.utils.lazy import LazyProperty
 
from rhodecode import CELERY_ON
 
from rhodecode.lib import str2bool, safe_str
 
from rhodecode.lib.utils2 import str2bool, safe_str
 
from rhodecode.lib.pidlock import DaemonLock, LockHeld
 
from rhodecode.model import init_model
 
from rhodecode.model import meta
 
from rhodecode.model.db import Statistics, Repository, User
 

	
 
from sqlalchemy import engine_from_config
 

	
 
from celery.messaging import establish_connection
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class ResultWrapper(object):
 
    def __init__(self, task):
 
        self.task = task
 

	
 
    @LazyProperty
 
    def result(self):
 
        return self.task
 

	
 

	
 
def run_task(task, *args, **kwargs):
 
    if CELERY_ON:
 
        try:
 
            t = task.apply_async(args=args, kwargs=kwargs)
 
            log.info('running task %s:%s' % (t.task_id, task))
 
            return t
 

	
 
        except socket.error, e:
 
            if isinstance(e, IOError) and e.errno == 111:
 
                log.debug('Unable to connect to celeryd. Sync execution')
 
            else:
 
                log.error(traceback.format_exc())
 
        except KeyError, e:
 
            log.debug('Unable to connect to celeryd. Sync execution')
 
        except Exception, e:
 
            log.error(traceback.format_exc())
 

	
 
    log.debug('executing task %s in sync mode' % task)
 
    return ResultWrapper(task(*args, **kwargs))
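# An illustrative usage sketch (recipient value is a placeholder): callers fire
# tasks through run_task so the same call works with and without a celery
# daemon, e.g. the password-reset flow in tasks.py does
# run_task(send_email, user_email, _("password reset link"), body).
#
#   result = run_task(send_email, ['admin@example.com'], 'subject', 'body text')
#   result.result  # ResultWrapper result in sync mode, task result via celery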
 

	
 

	
 
def __get_lockkey(func, *fargs, **fkwargs):
 
    params = list(fargs)
 
    params.extend(['%s-%s' % ar for ar in fkwargs.items()])
 

	
 
    func_name = str(func.__name__) if hasattr(func, '__name__') else str(func)
 

	
 
    lockkey = 'task_%s.lock' % \
 
        md5(func_name + '-' + '-'.join(map(safe_str, params))).hexdigest()
 
    return lockkey
 

	
 

	
 
def locked_task(func):
 
    def __wrapper(func, *fargs, **fkwargs):
 
        lockkey = __get_lockkey(func, *fargs, **fkwargs)
 
        lockkey_path = config['here']
 

	
 
        log.info('running task with lockkey %s' % lockkey)
 
        try:
 
            l = DaemonLock(file_=jn(lockkey_path, lockkey))
 
            ret = func(*fargs, **fkwargs)
 
            l.release()
 
            return ret
 
        except LockHeld:
 
            log.info('LockHeld')
 
            return 'Task with key %s already running' % lockkey
 

	
 
    return decorator(__wrapper, func)
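# An illustrative sketch (an assumption about intent, mirroring the whoosh_index
# task defined in tasks.py): stacking locked_task under the celery @task
# decorator guarantees that only one instance per argument set runs at a time.
#
#   @task(ignore_result=True)
#   @locked_task
#   def rebuild_index(repo_location, full_index):  # hypothetical task name
#       ...  # a concurrent call with the same args gets the
#            # 'Task with key ... already running' string instead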
 

	
 

	
 
def get_session():
 
    if CELERY_ON:
 
        engine = engine_from_config(config, 'sqlalchemy.db1.')
 
        init_model(engine)
 
    sa = meta.Session
 
    return sa
 

	
 

	
 
def dbsession(func):
 
    def __wrapper(func, *fargs, **fkwargs):
 
        try:
 
            ret = func(*fargs, **fkwargs)
 
            return ret
 
        finally:
 
            if CELERY_ON:
 
                meta.Session.remove()
 

	
 
    return decorator(__wrapper, func)
rhodecode/lib/celerylib/tasks.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.celerylib.tasks
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    RhodeCode task modules, containing all tasks that are supposed to be run
 
    by the celery daemon
 

	
 
    :created_on: Oct 6, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
from celery.decorators import task
 

	
 
import os
 
import traceback
 
import logging
 
from os.path import join as jn
 

	
 
from time import mktime
 
from operator import itemgetter
 
from string import lower
 

	
 
from pylons import config, url
 
from pylons.i18n.translation import _
 

	
 
from rhodecode.lib.vcs import get_backend
 

	
 
from rhodecode import CELERY_ON
 
from rhodecode.lib import LANGUAGES_EXTENSIONS_MAP, safe_str
 
from rhodecode.lib.utils2 import safe_str
 
from rhodecode.lib.celerylib import run_task, locked_task, dbsession, \
 
    str2bool, __get_lockkey, LockHeld, DaemonLock, get_session
 
from rhodecode.lib.helpers import person
 
from rhodecode.lib.rcmail.smtp_mailer import SmtpMailer
 
from rhodecode.lib.utils import add_cache, action_logger
 
from rhodecode.lib.compat import json, OrderedDict
 

	
 
from rhodecode.model.db import Statistics, Repository, User
 

	
 

	
 
add_cache(config)
 

	
 
__all__ = ['whoosh_index', 'get_commits_stats',
 
           'reset_user_password', 'send_email']
 

	
 

	
 
def get_logger(cls):
 
    if CELERY_ON:
 
        try:
 
            log = cls.get_logger()
 
        except:
 
            log = logging.getLogger(__name__)
 
    else:
 
        log = logging.getLogger(__name__)
 

	
 
    return log
 

	
 

	
 
@task(ignore_result=True)
 
@locked_task
 
@dbsession
 
def whoosh_index(repo_location, full_index):
 
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
 
    log = whoosh_index.get_logger(whoosh_index)
 
    DBS = get_session()
 

	
 
    index_location = config['index_dir']
 
    WhooshIndexingDaemon(index_location=index_location,
 
                         repo_location=repo_location, sa=DBS)\
 
                         .run(full_index=full_index)
 

	
 

	
 
@task(ignore_result=True)
 
@dbsession
 
def get_commits_stats(repo_name, ts_min_y, ts_max_y):
 
    log = get_logger(get_commits_stats)
 
    DBS = get_session()
 
    lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
 
                            ts_max_y)
 
    lockkey_path = config['here']
 

	
 
    log.info('running task with lockkey %s' % lockkey)
 

	
 
    try:
 
        lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))
 

	
 
        # for js data compatibility, strip double quotes from the person key
 
        akc = lambda k: person(k).replace('"', "")
 

	
 
        co_day_auth_aggr = {}
 
        commits_by_day_aggregate = {}
 
        repo = Repository.get_by_repo_name(repo_name)
 
        if repo is None:
 
            return True
 

	
 
        repo = repo.scm_instance
 
        repo_size = repo.count()
 
        # return if repo has no revisions
 
        if repo_size < 1:
 
            lock.release()
 
            return True
 

	
 
        skip_date_limit = True
 
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
 
        last_rev = None
 
        last_cs = None
 
        timegetter = itemgetter('time')
 

	
 
        dbrepo = DBS.query(Repository)\
 
            .filter(Repository.repo_name == repo_name).scalar()
 
        cur_stats = DBS.query(Statistics)\
 
            .filter(Statistics.repository == dbrepo).scalar()
 

	
 
        if cur_stats is not None:
 
            last_rev = cur_stats.stat_on_revision
 

	
 
        if last_rev == repo.get_changeset().revision and repo_size > 1:
 
            # pass silently without any work if the repo has more than one revision
 
            # and the current parsing state (the db marker) already points at the
 
            # last revision
 
            lock.release()
 
            return True
 

	
 
        if cur_stats:
 
            commits_by_day_aggregate = OrderedDict(json.loads(
 
                                        cur_stats.commit_activity_combined))
 
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)
 

	
 
        log.debug('starting parsing %s' % parse_limit)
 
        lmktime = mktime
 

	
 
        last_rev = last_rev + 1 if last_rev >= 0 else 0
 
        log.debug('Getting revisions from %s to %s' % (
 
             last_rev, last_rev + parse_limit)
 
        )
 
        for cs in repo[last_rev:last_rev + parse_limit]:
 
            log.debug('parsing %s' % cs)
 
            last_cs = cs  # remember last parsed changeset
 
            k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
 
                          cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])
 

	
 
            if akc(cs.author) in co_day_auth_aggr:
 
                try:
 
                    l = [timegetter(x) for x in
 
                         co_day_auth_aggr[akc(cs.author)]['data']]
 
                    time_pos = l.index(k)
 
                except ValueError:
 
                    time_pos = False
 

	
 
                if time_pos >= 0 and time_pos is not False:
 

	
 
                    datadict = \
 
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]
 

	
 
                    datadict["commits"] += 1
 
                    datadict["added"] += len(cs.added)
 
                    datadict["changed"] += len(cs.changed)
 
                    datadict["removed"] += len(cs.removed)
 

	
 
                else:
 
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
 

	
 
                        datadict = {"time": k,
 
                                    "commits": 1,
 
                                    "added": len(cs.added),
 
                                    "changed": len(cs.changed),
 
                                    "removed": len(cs.removed),
 
                                   }
 
                        co_day_auth_aggr[akc(cs.author)]['data']\
 
                            .append(datadict)
 

	
 
            else:
 
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
 
                    co_day_auth_aggr[akc(cs.author)] = {
 
                                        "label": akc(cs.author),
 
                                        "data": [{"time":k,
 
                                                 "commits":1,
 
                                                 "added":len(cs.added),
 
                                                 "changed":len(cs.changed),
 
                                                 "removed":len(cs.removed),
 
                                                 }],
 
                                        "schema": ["commits"],
 
                                        }
 

	
 
            #gather all data by day
 
            if k in commits_by_day_aggregate:
 
                commits_by_day_aggregate[k] += 1
 
            else:
 
                commits_by_day_aggregate[k] = 1
 

	
 
        overview_data = sorted(commits_by_day_aggregate.items(),
 
                               key=itemgetter(0))
 

	
 
        if not co_day_auth_aggr:
 
            co_day_auth_aggr[akc(repo.contact)] = {
 
                "label": akc(repo.contact),
 
                "data": [0, 1],
 
                "schema": ["commits"],
 
            }
 

	
 
        stats = cur_stats if cur_stats else Statistics()
 
        stats.commit_activity = json.dumps(co_day_auth_aggr)
 
        stats.commit_activity_combined = json.dumps(overview_data)
 

	
 
        log.debug('last revision %s' % last_rev)
 
        leftovers = len(repo.revisions[last_rev:])
 
        log.debug('revisions to parse %s' % leftovers)
 

	
 
        if last_rev == 0 or leftovers < parse_limit:
 
            log.debug('getting code trending stats')
 
            stats.languages = json.dumps(__get_codes_stats(repo_name))
 

	
 
        try:
 
            stats.repository = dbrepo
 
            stats.stat_on_revision = last_cs.revision if last_cs else 0
 
            DBS.add(stats)
 
            DBS.commit()
 
        except:
 
            log.error(traceback.format_exc())
 
            DBS.rollback()
 
            lock.release()
 
            return False
 

	
 
        #final release
 
        # final release
 
        lock.release()
 

	
 
        #execute another task if celery is enabled
 
        # execute another task if celery is enabled
 
        if len(repo.revisions) > 1 and CELERY_ON:
 
            run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
 
        return True
 
    except LockHeld:
 
        log.info('LockHeld')
 
        return 'Task with key %s already running' % lockkey
 

	
 
@task(ignore_result=True)
 
@dbsession
 
def send_password_link(user_email):
 
    from rhodecode.model.notification import EmailNotificationModel
 

	
 
    log = get_logger(send_password_link)
 
    DBS = get_session()
 

	
 
    try:
 
        user = User.get_by_email(user_email)
 
        if user:
 
            log.debug('password reset user found %s' % user)
 
            link = url('reset_password_confirmation', key=user.api_key,
 
                       qualified=True)
 
            reg_type = EmailNotificationModel.TYPE_PASSWORD_RESET
 
            body = EmailNotificationModel().get_email_tmpl(reg_type,
 
                                                **{'user':user.short_contact,
 
                                                   'reset_url':link})
 
            log.debug('sending email')
 
            run_task(send_email, user_email,
 
                     _("password reset link"), body)
 
            log.info('send new password mail to %s' % user_email)
 
        else:
 
            log.debug("password reset email %s not found" % user_email)
 
    except:
 
        log.error(traceback.format_exc())
 
        return False
 

	
 
    return True
 

	
 
@task(ignore_result=True)
 
@dbsession
 
def reset_user_password(user_email):
 
    from rhodecode.lib import auth
 

	
 
    log = get_logger(reset_user_password)
 
    DBS = get_session()
 

	
 
    try:
 
        try:
 
            user = User.get_by_email(user_email)
 
            new_passwd = auth.PasswordGenerator().gen_password(8,
 
                             auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
 
            if user:
 
                user.password = auth.get_crypt_password(new_passwd)
 
                user.api_key = auth.generate_api_key(user.username)
 
                DBS.add(user)
 
                DBS.commit()
 
                log.info('change password for %s' % user_email)
 
            if new_passwd is None:
 
                raise Exception('unable to generate new password')
 
        except:
 
            log.error(traceback.format_exc())
 
            DBS.rollback()
 

	
 
        run_task(send_email, user_email,
 
                 'Your new password',
 
                 'Your new RhodeCode password:%s' % (new_passwd))
 
        log.info('send new password mail to %s' % user_email)
 

	
 
    except:
 
        log.error('Failed to update user password')
 
        log.error(traceback.format_exc())
 

	
 
    return True
 

	
 

	
 
@task(ignore_result=True)
 
@dbsession
 
def send_email(recipients, subject, body, html_body=''):
 
    """
 
    Sends an email with defined parameters from the .ini files.
 

	
 
    :param recipients: list of recipients; if this is empty the defined email
 
        address from field 'email_to' is used instead
 
    :param subject: subject of the mail
 
    :param body: body of the mail
 
    :param html_body: html version of body
 
    """
 
    log = get_logger(send_email)
 
    DBS = get_session()
 

	
 
    email_config = config
 
    subject = "%s %s" % (email_config.get('email_prefix'), subject)
 
    if not recipients:
 
        # if recipients are not defined we send to email_config + all admins
 
        admins = [u.email for u in User.query()
 
                  .filter(User.admin == True).all()]
 
        recipients = [email_config.get('email_to')] + admins
 

	
 
    mail_from = email_config.get('app_email_from', 'RhodeCode')
 
    user = email_config.get('smtp_username')
 
    passwd = email_config.get('smtp_password')
 
    mail_server = email_config.get('smtp_server')
 
    mail_port = email_config.get('smtp_port')
 
    tls = str2bool(email_config.get('smtp_use_tls'))
 
    ssl = str2bool(email_config.get('smtp_use_ssl'))
 
    debug = str2bool(config.get('debug'))
 
    smtp_auth = email_config.get('smtp_auth')
 

	
 
    try:
 
        m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth,
 
                       mail_port, ssl, tls, debug=debug)
 
        m.send(recipients, subject, body, html_body)
 
    except:
 
        log.error('Mail sending failed')
 
        log.error(traceback.format_exc())
 
        return False
 
    return True
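# An illustrative .ini sketch (key names mirror the config.get() calls above,
# values are placeholders): send_email expects the mail settings to come from
# the application configuration, e.g.:
#
#   email_to = admin@example.com
#   email_prefix = [RhodeCode]
#   app_email_from = rhodecode-noreply@example.com
#   smtp_server = smtp.example.com
#   smtp_username = mailer
#   smtp_password = secret
#   smtp_port = 465
#   smtp_use_ssl = true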
 

	
 

	
 
@task(ignore_result=True)
 
@dbsession
 
def create_repo_fork(form_data, cur_user):
 
    """
 
    Creates a fork of a repository using internal VCS methods
 

	
 
    :param form_data:
 
    :param cur_user:
 
    """
 
    from rhodecode.model.repo import RepoModel
 

	
 
    log = get_logger(create_repo_fork)
 
    DBS = get_session()
 

	
 
    base_path = Repository.base_path()
 

	
 
    RepoModel(DBS).create(form_data, cur_user, just_db=True, fork=True)
 

	
 
    alias = form_data['repo_type']
 
    org_repo_name = form_data['org_path']
 
    fork_name = form_data['repo_name_full']
 
    update_after_clone = form_data['update_after_clone']
 
    source_repo_path = os.path.join(base_path, org_repo_name)
 
    destination_fork_path = os.path.join(base_path, fork_name)
 

	
 
    log.info('creating fork of %s as %s', source_repo_path,
 
             destination_fork_path)
 
    backend = get_backend(alias)
 
    backend(safe_str(destination_fork_path), create=True,
 
            src_url=safe_str(source_repo_path),
 
            update_after_clone=update_after_clone)
 
    action_logger(cur_user, 'user_forked_repo:%s' % fork_name,
 
                   org_repo_name, '', DBS)
 

	
 
    action_logger(cur_user, 'user_created_fork:%s' % fork_name,
 
                   fork_name, '', DBS)
 
    # finally commit at latest possible stage
 
    DBS.commit()
 

	
 
def __get_codes_stats(repo_name):
 
    from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
 
    repo = Repository.get_by_repo_name(repo_name).scm_instance
 

	
 
    tip = repo.get_changeset()
 
    code_stats = {}
 

	
 
    def aggregate(cs):
 
        for f in cs[2]:
 
            ext = lower(f.extension)
 
            if ext in LANGUAGES_EXTENSIONS_MAP.keys() and not f.is_binary:
 
                if ext in code_stats:
 
                    code_stats[ext] += 1
 
                else:
 
                    code_stats[ext] = 1
 

	
 
    map(aggregate, tip.walk('/'))
 

	
 
    return code_stats or {}
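# An illustrative result sketch (values are invented): for a mostly-Python
# repository __get_codes_stats() returns something like
# {'py': 142, 'rst': 12, 'ini': 3}, i.e. a count of non-binary files in the tip
# changeset keyed by lowercased extension, limited to LANGUAGES_EXTENSIONS_MAP.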
rhodecode/lib/celerypylons/commands.py
Show inline comments
 
import rhodecode
 
from rhodecode.lib.utils import BasePasterCommand, Command
 
from rhodecode.lib.utils import BasePasterCommand, Command, load_rcextensions
 
from celery.app import app_or_default
 
from celery.bin import camqadm, celerybeat, celeryd, celeryev
 

	
 
from rhodecode.lib import str2bool
 
from rhodecode.lib.utils2 import str2bool
 

	
 
__all__ = ['CeleryDaemonCommand', 'CeleryBeatCommand',
 
           'CAMQPAdminCommand', 'CeleryEventCommand']
 

	
 

	
 
class CeleryCommand(BasePasterCommand):
 
    """Abstract class implements run methods needed for celery
 

	
 
    Starts the celery worker that uses a paste.deploy configuration
 
    file.
 
    """
 

	
 
    def update_parser(self):
 
        """
 
        Abstract method.  Allows for the class's parser to be updated
 
        before the superclass's `run` method is called.  Necessary to
 
        allow options/arguments to be passed through to the underlying
 
        celery command.
 
        """
 

	
 
        cmd = self.celery_command(app_or_default())
 
        for x in cmd.get_options():
 
            self.parser.add_option(x)
 

	
 
    def command(self):
 
        from pylons import config
 
        try:
 
            CELERY_ON = str2bool(config['app_conf'].get('use_celery'))
 
        except KeyError:
 
            CELERY_ON = False
 

	
 
        if not CELERY_ON:
 
            raise Exception('Please set use_celery = true in the .ini config '
 
                            'file before running celeryd')
 
        rhodecode.CELERY_ON = CELERY_ON
 
        load_rcextensions(config['here'])
 
        cmd = self.celery_command(app_or_default())
 
        return cmd.run(**vars(self.options))
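    # An illustrative usage sketch (an assumption, not part of this changeset):
    # the concrete subclasses below are exposed as paster commands, so the
    # worker and beat processes are started against a paste.deploy .ini file:
    #
    #   paster celeryd production.ini
    #   paster celerybeat production.ini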
 

	
 

	
 
class CeleryDaemonCommand(CeleryCommand):
 
    """Start the celery worker
 

	
 
    Starts the celery worker that uses a paste.deploy configuration
 
    file.
 
    """
 
    usage = 'CONFIG_FILE [celeryd options...]'
 
    summary = __doc__.splitlines()[0]
 
    description = "".join(__doc__.splitlines()[2:])
 

	
 
    parser = Command.standard_parser(quiet=True)
 
    celery_command = celeryd.WorkerCommand
 

	
 

	
 
class CeleryBeatCommand(CeleryCommand):
 
    """Start the celery beat server
 

	
 
    Starts the celery beat server using a paste.deploy configuration
 
    file.
 
    """
 
    usage = 'CONFIG_FILE [celerybeat options...]'
 
    summary = __doc__.splitlines()[0]
 
    description = "".join(__doc__.splitlines()[2:])
 

	
 
    parser = Command.standard_parser(quiet=True)
 
    celery_command = celerybeat.BeatCommand
 

	
 

	
 
class CAMQPAdminCommand(CeleryCommand):
 
    """CAMQP Admin
 

	
 
    CAMQP celery admin tool.
 
    """
 
    usage = 'CONFIG_FILE [camqadm options...]'
 
    summary = __doc__.splitlines()[0]
 
    description = "".join(__doc__.splitlines()[2:])
 

	
 
    parser = Command.standard_parser(quiet=True)
 
    celery_command = camqadm.AMQPAdminCommand
 

	
 

	
 
class CeleryEventCommand(CeleryCommand):
 
    """Celery event command.
 

	
 
    Capture celery events.
 
    """
 
    usage = 'CONFIG_FILE [celeryev options...]'
 
    summary = __doc__.splitlines()[0]
 
    description = "".join(__doc__.splitlines()[2:])
 

	
 
    parser = Command.standard_parser(quiet=True)
 
    celery_command = celeryev.EvCommand
rhodecode/lib/dbmigrate/migrate/exceptions.py
Show inline comments
 
"""
 
   Provide exception classes for :mod:`migrate`
 
"""
 

	
 

	
 
class Error(Exception):
 
    """Error base class."""
 

	
 

	
 
class ApiError(Error):
 
    """Base class for API errors."""
 

	
 

	
 
class KnownError(ApiError):
 
    """A known error condition."""
 

	
 

	
 
class UsageError(ApiError):
 
    """A known error condition where help should be displayed."""
 

	
 

	
 
class ControlledSchemaError(Error):
 
    """Base class for controlled schema errors."""
 

	
 

	
 
class InvalidVersionError(ControlledSchemaError):
 
    """Invalid version number."""
 

	
 

	
 
class DatabaseNotControlledError(ControlledSchemaError):
 
    """Database should be under version control, but it's not."""
 

	
 

	
 
class DatabaseAlreadyControlledError(ControlledSchemaError):
 
    """Database shouldn't be under version control, but it is"""
 

	
 

	
 
class WrongRepositoryError(ControlledSchemaError):
 
    """This database is under version control by another repository."""
 

	
 

	
 
class NoSuchTableError(ControlledSchemaError):
 
    """The table does not exist."""
 

	
 

	
 
class PathError(Error):
 
    """Base class for path errors."""
 

	
 

	
 
class PathNotFoundError(PathError):
 
    """A path with no file was required; found a file."""
 

	
 

	
 
class PathFoundError(PathError):
 
    """A path with a file was required; found no file."""
 

	
 

	
 
class RepositoryError(Error):
 
    """Base class for repository errors."""
 

	
 

	
 
class InvalidRepositoryError(RepositoryError):
 
    """Invalid repository error."""
 

	
 

	
 
class ScriptError(Error):
 
    """Base class for script errors."""
 

	
 

	
 
class InvalidScriptError(ScriptError):
 
    """Invalid script error."""
 

	
 

	
 
class InvalidVersionError(Error):
 
    """Invalid version error."""
 

	
 
# migrate.changeset
 

	
 
class NotSupportedError(Error):
 
    """Not supported error"""
 

	
 

	
 
class InvalidConstraintError(Error):
 
    """Invalid constraint error"""
 

	
 
class MigrateDeprecationWarning(DeprecationWarning):
 
    """Warning for deprecated features in Migrate"""
rhodecode/lib/dbmigrate/schema/db_1_2_0.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.model.db
 
    ~~~~~~~~~~~~~~~~~~
 

	
 
    Database Models for RhodeCode
 

	
 
    :created_on: Apr 08, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import os
 
import logging
 
import datetime
 
import traceback
 
from datetime import date
 

	
 
from sqlalchemy import *
 
from sqlalchemy.ext.hybrid import hybrid_property
 
from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
 
from beaker.cache import cache_region, region_invalidate
 

	
 
from rhodecode.lib.vcs import get_backend
 
from rhodecode.lib.vcs.utils.helpers import get_scm
 
from rhodecode.lib.vcs.exceptions import VCSError
 
from rhodecode.lib.vcs.utils.lazy import LazyProperty
 

	
 
from rhodecode.lib import str2bool, safe_str, get_changeset_safe, \
 
from rhodecode.lib.utils2 import str2bool, safe_str, get_changeset_safe, \
 
    generate_api_key, safe_unicode
 
from rhodecode.lib.exceptions import UsersGroupsAssignedException
 
from rhodecode.lib.compat import json
 

	
 
from rhodecode.model.meta import Base, Session
 
from rhodecode.lib.caching_query import FromCache
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 
#==============================================================================
 
# BASE CLASSES
 
#==============================================================================
 

	
 
class ModelSerializer(json.JSONEncoder):
 
    """
 
    Simple Serializer for JSON,
 

	
 
    usage::
 

	
 
        to make an object customized for serialization, implement a __json__
 
        method that returns a dict for serialization into json
 

	
 
    example::
 

	
 
        class Task(object):
 

	
 
            def __init__(self, name, value):
 
                self.name = name
 
                self.value = value
 

	
 
            def __json__(self):
 
                return dict(name=self.name,
 
                            value=self.value)
 

	
 
    """
 

	
 
    def default(self, obj):
 

	
 
        if hasattr(obj, '__json__'):
 
            return obj.__json__()
 
        else:
 
            return json.JSONEncoder.default(self, obj)
 

	
 
class BaseModel(object):
 
    """Base Model for all classess
 

	
 
    """
 

	
 
    @classmethod
 
    def _get_keys(cls):
 
        """return column names for this model """
 
        return class_mapper(cls).c.keys()
 

	
 
    def get_dict(self):
 
        """return dict with keys and values corresponding
 
        to this model data """
 

	
 
        d = {}
 
        for k in self._get_keys():
 
            d[k] = getattr(self, k)
 
        return d
 

	
 
    def get_appstruct(self):
 
        """return list with keys and values tupples corresponding
 
        to this model data """
 

	
 
        l = []
 
        for k in self._get_keys():
 
            l.append((k, getattr(self, k),))
 
        return l
 

	
 
    def populate_obj(self, populate_dict):
 
        """populate model with data from given populate_dict"""
 

	
 
        for k in self._get_keys():
 
            if k in populate_dict:
 
                setattr(self, k, populate_dict[k])
 

	
 
    @classmethod
 
    def query(cls):
 
        return Session.query(cls)
 

	
 
    @classmethod
 
    def get(cls, id_):
 
        if id_:
 
            return cls.query().get(id_)
 

	
 
    @classmethod
 
    def getAll(cls):
 
        return cls.query().all()
 

	
 
    @classmethod
 
    def delete(cls, id_):
 
        obj = cls.query().get(id_)
 
        Session.delete(obj)
 
        Session.commit()
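    # An illustrative usage sketch (an assumption, not part of this changeset):
    # every mapped class below inherits these helpers, so callers can do e.g.
    #
    #   repo = Repository.get(1)          # primary key lookup
    #   admins = User.query().filter(User.admin == True).all()
    #   data = repo.get_dict()            # {column name: value} for the row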
 

	
 

	
 
class RhodeCodeSetting(Base, BaseModel):
 
    __tablename__ = 'rhodecode_settings'
 
    __table_args__ = (UniqueConstraint('app_settings_name'), {'extend_existing':True})
 
    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    app_settings_name = Column("app_settings_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    _app_settings_value = Column("app_settings_value", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 

	
 
    def __init__(self, k='', v=''):
 
        self.app_settings_name = k
 
        self.app_settings_value = v
 

	
 

	
 
    @validates('_app_settings_value')
 
    def validate_settings_value(self, key, val):
 
        assert type(val) == unicode
 
        return val
 

	
 
    @hybrid_property
 
    def app_settings_value(self):
 
        v = self._app_settings_value
 
        if v == 'ldap_active':
 
            v = str2bool(v)
 
        return v
 

	
 
    @app_settings_value.setter
 
    def app_settings_value(self, val):
 
        """
 
        Setter that will always make sure we use unicode in app_settings_value
 

	
 
        :param val:
 
        """
 
        self._app_settings_value = safe_unicode(val)
 

	
 
    def __repr__(self):
 
        return "<%s('%s:%s')>" % (self.__class__.__name__,
 
                                  self.app_settings_name, self.app_settings_value)
 

	
 

	
 
    @classmethod
 
    def get_by_name(cls, ldap_key):
 
        return cls.query()\
 
            .filter(cls.app_settings_name == ldap_key).scalar()
 

	
 
    @classmethod
 
    def get_app_settings(cls, cache=False):
 

	
 
        ret = cls.query()
 

	
 
        if cache:
 
            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
 

	
 
        if not ret:
 
            raise Exception('Could not get application settings !')
 
        settings = {}
 
        for each in ret:
 
            settings['rhodecode_' + each.app_settings_name] = \
 
                each.app_settings_value
 

	
 
        return settings
 

	
 
    @classmethod
 
    def get_ldap_settings(cls, cache=False):
 
        ret = cls.query()\
 
                .filter(cls.app_settings_name.startswith('ldap_')).all()
 
        fd = {}
 
        for row in ret:
 
            fd.update({row.app_settings_name:row.app_settings_value})
 

	
 
        return fd
 

	
 

	
 
class RhodeCodeUi(Base, BaseModel):
 
    __tablename__ = 'rhodecode_ui'
 
    __table_args__ = (UniqueConstraint('ui_key'), {'extend_existing':True})
 

	
 
    HOOK_UPDATE = 'changegroup.update'
 
    HOOK_REPO_SIZE = 'changegroup.repo_size'
 
    HOOK_PUSH = 'pretxnchangegroup.push_logger'
 
    HOOK_PULL = 'preoutgoing.pull_logger'
 

	
 
    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    ui_section = Column("ui_section", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    ui_key = Column("ui_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    ui_value = Column("ui_value", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
 

	
 

	
 
    @classmethod
 
    def get_by_key(cls, key):
 
        return cls.query().filter(cls.ui_key == key)
 

	
 

	
 
    @classmethod
 
    def get_builtin_hooks(cls):
 
        q = cls.query()
 
        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE,
 
                                    cls.HOOK_REPO_SIZE,
 
                                    cls.HOOK_PUSH, cls.HOOK_PULL]))
 
        return q.all()
 

	
 
    @classmethod
 
    def get_custom_hooks(cls):
 
        q = cls.query()
 
        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE,
 
                                    cls.HOOK_REPO_SIZE,
 
                                    cls.HOOK_PUSH, cls.HOOK_PULL]))
 
        q = q.filter(cls.ui_section == 'hooks')
 
        return q.all()
 

	
 
    @classmethod
 
    def create_or_update_hook(cls, key, val):
 
        new_ui = cls.get_by_key(key).scalar() or cls()
 
        new_ui.ui_section = 'hooks'
 
        new_ui.ui_active = True
 
        new_ui.ui_key = key
 
        new_ui.ui_value = val
 

	
 
        Session.add(new_ui)
 
        Session.commit()
 

	
 

	
 
class User(Base, BaseModel):
 
    __tablename__ = 'users'
 
    __table_args__ = (UniqueConstraint('username'), UniqueConstraint('email'), {'extend_existing':True})
 
    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    username = Column("username", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    password = Column("password", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    active = Column("active", Boolean(), nullable=True, unique=None, default=None)
 
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
 
    name = Column("name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    lastname = Column("lastname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    email = Column("email", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
 
    ldap_dn = Column("ldap_dn", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    api_key = Column("api_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 

	
 
    user_log = relationship('UserLog', cascade='all')
 
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
 

	
 
    repositories = relationship('Repository')
 
    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
 
    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
 

	
 
    group_member = relationship('UsersGroupMember', cascade='all')
 

	
 
    @property
 
    def full_contact(self):
 
        return '%s %s <%s>' % (self.name, self.lastname, self.email)
 

	
 
    @property
 
    def short_contact(self):
 
        return '%s %s' % (self.name, self.lastname)
 

	
 
    @property
 
    def is_admin(self):
 
        return self.admin
 

	
 
    def __repr__(self):
 
        try:
 
            return "<%s('id:%s:%s')>" % (self.__class__.__name__,
 
                                             self.user_id, self.username)
 
        except:
 
            return self.__class__.__name__
 

	
 
    @classmethod
 
    def get_by_username(cls, username, case_insensitive=False):
 
        if case_insensitive:
 
            return Session.query(cls).filter(cls.username.ilike(username)).scalar()
 
        else:
 
            return Session.query(cls).filter(cls.username == username).scalar()
 

	
 
    @classmethod
 
    def get_by_api_key(cls, api_key):
 
        return cls.query().filter(cls.api_key == api_key).one()
 

	
 
    def update_lastlogin(self):
 
        """Update user lastlogin"""
 

	
 
        self.last_login = datetime.datetime.now()
 
        Session.add(self)
 
        Session.commit()
 
        log.debug('updated user %s lastlogin' % self.username)
 

	
 
    @classmethod
 
    def create(cls, form_data):
 
        from rhodecode.lib.auth import get_crypt_password
 

	
 
        try:
 
            new_user = cls()
 
            for k, v in form_data.items():
 
                if k == 'password':
 
                    v = get_crypt_password(v)
 
                setattr(new_user, k, v)
 

	
 
            new_user.api_key = generate_api_key(form_data['username'])
 
            Session.add(new_user)
 
            Session.commit()
 
            return new_user
 
        except:
 
            log.error(traceback.format_exc())
 
            Session.rollback()
 
            raise
 

	
 
class UserLog(Base, BaseModel):
 
    __tablename__ = 'user_logs'
 
    __table_args__ = {'extend_existing':True}
 
    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
 
    repository_name = Column("repository_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    user_ip = Column("user_ip", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    action = Column("action", UnicodeText(length=1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
 

	
 
    @property
 
    def action_as_day(self):
 
        return date(*self.action_date.timetuple()[:3])
 

	
 
    user = relationship('User')
 
    repository = relationship('Repository')
 

	
 

	
 
class UsersGroup(Base, BaseModel):
 
    __tablename__ = 'users_groups'
 
    __table_args__ = {'extend_existing':True}
 

	
 
    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_name = Column("users_group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
 
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
 

	
 
    members = relationship('UsersGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
 

	
 
    def __repr__(self):
 
        return '<userGroup(%s)>' % (self.users_group_name)
 

	
 
    @classmethod
 
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
 
        if case_insensitive:
 
            gr = cls.query()\
 
                .filter(cls.users_group_name.ilike(group_name))
 
        else:
 
            gr = cls.query()\
 
                .filter(cls.users_group_name == group_name)
 
        if cache:
 
            gr = gr.options(FromCache("sql_cache_short",
 
                                          "get_user_%s" % group_name))
 
        return gr.scalar()
 

	
 

	
 
    @classmethod
 
    def get(cls, users_group_id, cache=False):
 
        users_group = cls.query()
 
        if cache:
 
            users_group = users_group.options(FromCache("sql_cache_short",
 
                                    "get_users_group_%s" % users_group_id))
 
        return users_group.get(users_group_id)
 

	
 
    @classmethod
 
    def create(cls, form_data):
 
        try:
 
            new_users_group = cls()
 
            for k, v in form_data.items():
 
                setattr(new_users_group, k, v)
 

	
 
            Session.add(new_users_group)
 
            Session.commit()
 
            return new_users_group
 
        except:
 
            log.error(traceback.format_exc())
 
            Session.rollback()
 
            raise
 

	
 
    @classmethod
 
    def update(cls, users_group_id, form_data):
 

	
 
        try:
 
            users_group = cls.get(users_group_id, cache=False)
 

	
 
            for k, v in form_data.items():
 
                if k == 'users_group_members':
 
                    users_group.members = []
 
                    Session.flush()
 
                    members_list = []
 
                    if v:
 
                        v = [v] if isinstance(v, basestring) else v
 
                        for u_id in set(v):
 
                            member = UsersGroupMember(users_group_id, u_id)
 
                            members_list.append(member)
 
                    setattr(users_group, 'members', members_list)
 
                setattr(users_group, k, v)
 

	
 
            Session.add(users_group)
 
            Session.commit()
 
        except:
 
            log.error(traceback.format_exc())
 
            Session.rollback()
 
            raise
 

	
 
    @classmethod
 
    def delete(cls, users_group_id):
 
        try:
 

	
 
            # check if this group is not assigned to repo
 
            assigned_groups = UsersGroupRepoToPerm.query()\
 
                .filter(UsersGroupRepoToPerm.users_group_id ==
 
                        users_group_id).all()
 

	
 
            if assigned_groups:
 
                raise UsersGroupsAssignedException('RepoGroup assigned to %s' %
 
                                                   assigned_groups)
 

	
 
            users_group = cls.get(users_group_id, cache=False)
 
            Session.delete(users_group)
 
            Session.commit()
 
        except:
 
            log.error(traceback.format_exc())
 
            Session.rollback()
 
            raise
 

	
 
class UsersGroupMember(Base, BaseModel):
 
    __tablename__ = 'users_groups_members'
 
    __table_args__ = {'extend_existing':True}
 

	
 
    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User', lazy='joined')
 
    users_group = relationship('UsersGroup')
 

	
 
    def __init__(self, gr_id='', u_id=''):
 
        self.users_group_id = gr_id
 
        self.user_id = u_id
 

	
 
    @staticmethod
 
    def add_user_to_group(group, user):
 
        ugm = UsersGroupMember()
 
        ugm.users_group = group
 
        ugm.user = user
 
        Session.add(ugm)
 
        Session.commit()
 
        return ugm
 

	
 
class Repository(Base, BaseModel):
 
    __tablename__ = 'repositories'
 
    __table_args__ = (UniqueConstraint('repo_name'), {'extend_existing':True},)
 

	
 
    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    repo_name = Column("repo_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
 
    clone_uri = Column("clone_uri", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
 
    repo_type = Column("repo_type", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default='hg')
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
 
    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
 
    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
 
    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
 
    description = Column("description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
 

	
 
    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
 
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
 

	
 

	
 
    user = relationship('User')
 
    fork = relationship('Repository', remote_side=repo_id)
 
    group = relationship('RepoGroup')
 
    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
 
    users_group_to_perm = relationship('UsersGroupRepoToPerm', cascade='all')
 
    stats = relationship('Statistics', cascade='all', uselist=False)
 

	
 
    followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all')
 

	
 
    logs = relationship('UserLog', cascade='all')
 

	
 
    def __repr__(self):
 
        return "<%s('%s:%s')>" % (self.__class__.__name__,
 
                                  self.repo_id, self.repo_name)
 

	
 
    @classmethod
 
    def url_sep(cls):
 
        return '/'
 

	
 
    @classmethod
 
    def get_by_repo_name(cls, repo_name):
 
        q = Session.query(cls).filter(cls.repo_name == repo_name)
 
        q = q.options(joinedload(Repository.fork))\
 
                .options(joinedload(Repository.user))\
 
                .options(joinedload(Repository.group))
 
        return q.one()
 

	
 
    @classmethod
 
    def get_repo_forks(cls, repo_id):
 
        return cls.query().filter(Repository.fork_id == repo_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns base path where all repos are stored
 

	
 
        :param cls:
 
        """
 
        q = Session.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key ==
 
                                              cls.url_sep())
 
        q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def just_name(self):
 
        return self.repo_name.split(Repository.url_sep())[-1]
 

	
 
    @property
 
    def groups_with_parents(self):
 
        groups = []
 
        if self.group is None:
 
            return groups
 

	
 
        cur_gr = self.group
 
        groups.insert(0, cur_gr)
 
        while 1:
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            groups.insert(0, gr)
 

	
 
        return groups
 

	
 
    @property
 
    def groups_and_repo(self):
 
        return self.groups_with_parents, self.just_name
 

	
 
    @LazyProperty
 
    def repo_path(self):
 
        """
 
        Returns the full base path for this repository, i.e. where it actually
 
        exists on the filesystem
 
        """
 
        q = Session.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key ==
 
                                              Repository.url_sep())
 
        q.options(FromCache("sql_cache_short", "repository_repo_path"))
 
        return q.one().ui_value
 

	
 
    @property
 
    def repo_full_path(self):
 
        p = [self.repo_path]
 
        # we need to split the name by / since this is how we store the
 
        # names in the database, but that eventually needs to be converted
 
        # into a valid system path
 
        p += self.repo_name.split(Repository.url_sep())
 
        return os.path.join(*p)
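
# Standalone illustration only, not part of the model code above; the paths
# are made-up example values. It shows how repo_full_path joins the base
# path with the URL-style repo name stored in the database.
import os
_base, _repo_name = '/srv/repos', 'web/projectx'
print os.path.join(*([_base] + _repo_name.split('/')))  # /srv/repos/web/projectx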
 

	
 
    def get_new_name(self, repo_name):
 
        """
 
        returns the new full repository name based on the assigned group and the new name
 

	
 
        :param repo_name:
 
        """
 
        path_prefix = self.group.full_path_splitted if self.group else []
 
        return Repository.url_sep().join(path_prefix + [repo_name])
 

	
 
    @property
 
    def _ui(self):
 
        """
 
        Creates a db-based ui object for this repository
 
        """
 
        from mercurial import ui
 
        from mercurial import config
 
        baseui = ui.ui()
 

	
 
        #clean the baseui object
 
        baseui._ocfg = config.config()
 
        baseui._ucfg = config.config()
 
        baseui._tcfg = config.config()
 

	
 

	
 
        ret = RhodeCodeUi.query()\
 
            .options(FromCache("sql_cache_short", "repository_repo_ui")).all()
 

	
 
        hg_ui = ret
 
        for ui_ in hg_ui:
 
            if ui_.ui_active:
 
                log.debug('setting ui from db[%s]%s:%s', ui_.ui_section,
 
                          ui_.ui_key, ui_.ui_value)
 
                baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)
 

	
 
        return baseui
 

	
 
    @classmethod
 
    def is_valid(cls, repo_name):
 
        """
 
        returns True if given repo name is a valid filesystem repository
 

	
 
        :param cls:
 
        :param repo_name:
 
        """
 
        from rhodecode.lib.utils import is_valid_repo
 

	
 
        return is_valid_repo(repo_name, cls.base_path())
 

	
 

	
 
    #==========================================================================
 
    # SCM PROPERTIES
 
    #==========================================================================
 

	
 
    def get_changeset(self, rev):
 
        return get_changeset_safe(self.scm_instance, rev)
 

	
 
    @property
 
    def tip(self):
 
        return self.get_changeset('tip')
 

	
 
    @property
 
    def author(self):
 
        return self.tip.author
 

	
 
    @property
 
    def last_change(self):
 
        return self.scm_instance.last_change
 

	
 
    #==========================================================================
 
    # SCM CACHE INSTANCE
 
    #==========================================================================
 

	
 
    @property
 
    def invalidate(self):
 
        return CacheInvalidation.invalidate(self.repo_name)
 

	
 
    def set_invalidate(self):
 
        """
 
        set a cache for invalidation for this instance
 
        """
 
        CacheInvalidation.set_invalidate(self.repo_name)
 

	
 
    @LazyProperty
 
    def scm_instance(self):
 
        return self.__get_instance()
 

	
 
    @property
 
    def scm_instance_cached(self):
 
        @cache_region('long_term')
 
        def _c(repo_name):
 
            return self.__get_instance()
 
        rn = self.repo_name
 

	
 
        inv = self.invalidate
 
        if inv is not None:
 
            region_invalidate(_c, None, rn)
 
            # update our cache
 
            CacheInvalidation.set_valid(inv.cache_key)
 
        return _c(rn)
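
# A simplified, standalone model of the invalidation pattern used by
# scm_instance_cached above; it is not beaker itself, just the same idea:
# when an invalidation marker exists, the cached value is dropped and rebuilt.
_repo_cache = {}

def _cached_instance(repo_name, build, invalidated=False):
    if invalidated:
        _repo_cache.pop(repo_name, None)   # stands in for region_invalidate()
    if repo_name not in _repo_cache:
        _repo_cache[repo_name] = build()   # stands in for the decorated _c()
    return _repo_cache[repo_name]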
 

	
 
    def __get_instance(self):
 

	
 
        repo_full_path = self.repo_full_path
 

	
 
        try:
 
            alias = get_scm(repo_full_path)[0]
 
            log.debug('Creating instance of %s repository' % alias)
 
            backend = get_backend(alias)
 
        except VCSError:
 
            log.error(traceback.format_exc())
 
            log.error('Perhaps this repository is in the db but not on the '
 
                      'filesystem; run "rescan repositories" with the '
 
                      '"destroy old data" option from the admin panel')
 
            return
 

	
 
        if alias == 'hg':
 

	
 
            repo = backend(safe_str(repo_full_path), create=False,
 
                           baseui=self._ui)
 
            # skip hidden web repository
 
            if repo._get_hidden():
 
                return
 
        else:
 
            repo = backend(repo_full_path, create=False)
 

	
 
        return repo
 

	
 

	
 
class RepoGroup(Base, BaseModel):
 
class Group(Base, BaseModel):
 
    __tablename__ = 'groups'
 
    __table_args__ = (UniqueConstraint('group_name', 'group_parent_id'),
 
                      CheckConstraint('group_id != group_parent_id'), {'extend_existing':True},)
 
    __mapper_args__ = {'order_by':'group_name'}
 

	
 
    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    group_name = Column("group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
 
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
 
    group_description = Column("group_description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 

	
 
    parent_group = relationship('RepoGroup', remote_side=group_id)
 

	
 
    parent_group = relationship('Group', remote_side=group_id)
 

	
 
    def __init__(self, group_name='', parent_group=None):
 
        self.group_name = group_name
 
        self.parent_group = parent_group
 

	
 
    def __repr__(self):
 
        return "<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
 
                                  self.group_name)
 

	
 
    @classmethod
 
    def groups_choices(cls):
 
        from webhelpers.html import literal as _literal
 
        repo_groups = [('', '')]
 
        sep = ' &raquo; '
 
        _name = lambda k: _literal(sep.join(k))
 

	
 
        repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
 
                              for x in cls.query().all()])
 

	
 
        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
 
        return repo_groups
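
# Rough illustration with made-up groups (not data from this changeset):
# groups_choices() above builds (group_id, label) pairs whose labels join
# the path parts with the &raquo; separator, roughly like
# [('', ''), (1, u'projects'), (2, u'projects &raquo; web')]
_sep = ' &raquo; '
print _sep.join(['projects', 'web'])   # projects &raquo; web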
 

	
 
    @classmethod
 
    def url_sep(cls):
 
        return '/'
 

	
 
    @classmethod
 
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
 
        if case_insensitive:
 
            gr = cls.query()\
 
                .filter(cls.group_name.ilike(group_name))
 
        else:
 
            gr = cls.query()\
 
                .filter(cls.group_name == group_name)
 
        if cache:
 
            gr = gr.options(FromCache("sql_cache_short",
 
                                          "get_group_%s" % group_name))
 
        return gr.scalar()
 

	
 
    @property
 
    def parents(self):
 
        parents_recursion_limit = 5
 
        groups = []
 
        if self.parent_group is None:
 
            return groups
 
        cur_gr = self.parent_group
 
        groups.insert(0, cur_gr)
 
        cnt = 0
 
        while 1:
 
            cnt += 1
 
            gr = getattr(cur_gr, 'parent_group', None)
 
            cur_gr = cur_gr.parent_group
 
            if gr is None:
 
                break
 
            if cnt == parents_recursion_limit:
 
                # this will prevent accidental infinite loops
 
                log.error('group nested more than %s' %
 
                          parents_recursion_limit)
 
                break
 

	
 
            groups.insert(0, gr)
 
        return groups
 

	
 
    @property
 
    def children(self):
 
        return Group.query().filter(Group.parent_group == self)
 

	
 
    @property
 
    def name(self):
 
        return self.group_name.split(Group.url_sep())[-1]
 

	
 
    @property
 
    def full_path(self):
 
        return self.group_name
 

	
 
    @property
 
    def full_path_splitted(self):
 
        return self.group_name.split(Group.url_sep())
 

	
 
    @property
 
    def repositories(self):
 
        return Repository.query().filter(Repository.group == self)
 

	
 
    @property
 
    def repositories_recursive_count(self):
 
        cnt = self.repositories.count()
 

	
 
        def children_count(group):
 
            cnt = 0
 
            for child in group.children:
 
                cnt += child.repositories.count()
 
                cnt += children_count(child)
 
            return cnt
 

	
 
        return cnt + children_count(self)
 

	
 

	
 
    def get_new_name(self, group_name):
 
        """
 
        returns new full group name based on parent and new name
 

	
 
        :param group_name:
 
        """
 
        path_prefix = (self.parent_group.full_path_splitted if
 
                       self.parent_group else [])
 
        return Group.url_sep().join(path_prefix + [group_name])
 

	
 

	
 
class Permission(Base, BaseModel):
 
    __tablename__ = 'permissions'
 
    __table_args__ = {'extend_existing':True}
 
    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    permission_name = Column("permission_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    permission_longname = Column("permission_longname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 

	
 
    def __repr__(self):
 
        return "<%s('%s:%s')>" % (self.__class__.__name__,
 
                                  self.permission_id, self.permission_name)
 

	
 
    @classmethod
 
    def get_by_key(cls, key):
 
        return cls.query().filter(cls.permission_name == key).scalar()
 

	
 
class UserRepoToPerm(Base, BaseModel):
 
    __tablename__ = 'repo_to_perm'
 
    __table_args__ = (UniqueConstraint('user_id', 'repository_id'), {'extend_existing':True})
 
    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User')
 
    permission = relationship('Permission')
 
    repository = relationship('Repository')
 

	
 
class UserToPerm(Base, BaseModel):
 
    __tablename__ = 'user_to_perm'
 
    __table_args__ = (UniqueConstraint('user_id', 'permission_id'), {'extend_existing':True})
 
    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User')
 
    permission = relationship('Permission')
 

	
 
    @classmethod
 
    def has_perm(cls, user_id, perm):
 
        if not isinstance(perm, Permission):
 
            raise Exception('perm needs to be an instance of Permission class')
 

	
 
        return cls.query().filter(cls.user_id == user_id)\
 
            .filter(cls.permission == perm).scalar() is not None
 

	
 
    @classmethod
 
    def grant_perm(cls, user_id, perm):
 
        if not isinstance(perm, Permission):
 
            raise Exception('perm needs to be an instance of Permission class')
 

	
 
        new = cls()
 
        new.user_id = user_id
 
        new.permission = perm
 
        try:
 
            Session.add(new)
 
            Session.commit()
 
        except:
 
            Session.rollback()
 

	
 

	
 
    @classmethod
 
    def revoke_perm(cls, user_id, perm):
 
        if not isinstance(perm, Permission):
 
            raise Exception('perm needs to be an instance of Permission class')
 

	
 
        try:
 
            cls.query().filter(cls.user_id == user_id)\
 
                .filter(cls.permission == perm).delete()
 
            Session.commit()
 
        except:
 
            Session.rollback()
 

	
 
class UsersGroupRepoToPerm(Base, BaseModel):
 
    __tablename__ = 'users_group_repo_to_perm'
 
    __table_args__ = (UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), {'extend_existing':True})
 
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
 

	
 
    users_group = relationship('UsersGroup')
 
    permission = relationship('Permission')
 
    repository = relationship('Repository')
 

	
 
    def __repr__(self):
 
        return '<userGroup:%s => %s >' % (self.users_group, self.repository)
 

	
 
class UsersGroupToPerm(Base, BaseModel):
 
    __tablename__ = 'users_group_to_perm'
 
    __table_args__ = {'extend_existing':True}
 
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 

	
 
    users_group = relationship('UsersGroup')
 
    permission = relationship('Permission')
 

	
 

	
 
    @classmethod
 
    def has_perm(cls, users_group_id, perm):
 
        if not isinstance(perm, Permission):
 
            raise Exception('perm needs to be an instance of Permission class')
 

	
 
        return cls.query().filter(cls.users_group_id ==
 
                                         users_group_id)\
 
                                         .filter(cls.permission == perm)\
 
                                         .scalar() is not None
 

	
 
    @classmethod
 
    def grant_perm(cls, users_group_id, perm):
 
        if not isinstance(perm, Permission):
 
            raise Exception('perm needs to be an instance of Permission class')
 

	
 
        new = cls()
 
        new.users_group_id = users_group_id
 
        new.permission = perm
 
        try:
 
            Session.add(new)
 
            Session.commit()
 
        except:
 
            Session.rollback()
 

	
 

	
 
    @classmethod
 
    def revoke_perm(cls, users_group_id, perm):
 
        if not isinstance(perm, Permission):
 
            raise Exception('perm needs to be an instance of Permission class')
 

	
 
        try:
 
            cls.query().filter(cls.users_group_id == users_group_id)\
 
                .filter(cls.permission == perm).delete()
 
            Session.commit()
 
        except:
 
            Session.rollback()
 

	
 

	
 
class UserRepoGroupToPerm(Base, BaseModel):
 
    __tablename__ = 'group_to_perm'
 
    __table_args__ = (UniqueConstraint('group_id', 'permission_id'), {'extend_existing':True})
 

	
 
    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
 
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
 

	
 
    user = relationship('User')
 
    permission = relationship('Permission')
 
    group = relationship('RepoGroup')
 

	
 
class Statistics(Base, BaseModel):
 
    __tablename__ = 'statistics'
 
    __table_args__ = (UniqueConstraint('repository_id'), {'extend_existing':True})
 
    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
 
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
 
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
 
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
 
    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
 

	
 
    repository = relationship('Repository', single_parent=True)
 

	
 
class UserFollowing(Base, BaseModel):
 
    __tablename__ = 'user_followings'
 
    __table_args__ = (UniqueConstraint('user_id', 'follows_repository_id'),
 
                      UniqueConstraint('user_id', 'follows_user_id')
 
                      , {'extend_existing':True})
 

	
 
    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
 
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
 
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
 
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
 

	
 
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
 

	
 
    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
 
    follows_repository = relationship('Repository', order_by='Repository.repo_name')
 

	
 

	
 
    @classmethod
 
    def get_repo_followers(cls, repo_id):
 
        return cls.query().filter(cls.follows_repo_id == repo_id)
 

	
 
class CacheInvalidation(Base, BaseModel):
 
    __tablename__ = 'cache_invalidation'
 
    __table_args__ = (UniqueConstraint('cache_key'), {'extend_existing':True})
 
    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
 
    cache_key = Column("cache_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    cache_args = Column("cache_args", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
 
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
 

	
 

	
 
    def __init__(self, cache_key, cache_args=''):
 
        self.cache_key = cache_key
 
        self.cache_args = cache_args
 
        self.cache_active = False
 

	
 
    def __repr__(self):
 
        return "<%s('%s:%s')>" % (self.__class__.__name__,
 
                                  self.cache_id, self.cache_key)
 

	
 
    @classmethod
 
    def invalidate(cls, key):
 
        """
 
        Returns the Invalidation object if the given key should be invalidated,
 
        None otherwise. `cache_active = False` means that this cache
 
        state is not valid and needs to be invalidated
 

	
 
        :param key:
 
        """
 
        return cls.query()\
 
                .filter(CacheInvalidation.cache_key == key)\
 
                .filter(CacheInvalidation.cache_active == False)\
 
                .scalar()
 

	
 
    @classmethod
 
    def set_invalidate(cls, key):
 
        """
 
        Mark this Cache key for invalidation
 

	
 
        :param key:
 
        """
 

	
 
        log.debug('marking %s for invalidation' % key)
 
        inv_obj = Session.query(cls)\
 
            .filter(cls.cache_key == key).scalar()
 
        if inv_obj:
 
            inv_obj.cache_active = False
 
        else:
 
            log.debug('cache key not found in invalidation db -> creating one')
 
            inv_obj = CacheInvalidation(key)
 

	
 
        try:
 
            Session.add(inv_obj)
 
            Session.commit()
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            Session.rollback()
 

	
 
    @classmethod
 
    def set_valid(cls, key):
 
        """
 
        Mark this cache key as active and currently cached
 

	
 
        :param key:
 
        """
 
        inv_obj = Session.query(CacheInvalidation)\
 
            .filter(CacheInvalidation.cache_key == key).scalar()
 
        inv_obj.cache_active = True
 
        Session.add(inv_obj)
 
        Session.commit()
 

	
 
class DbMigrateVersion(Base, BaseModel):
 
    __tablename__ = 'db_migrate_version'
 
    __table_args__ = {'extend_existing':True}
 
    repository_id = Column('repository_id', String(250), primary_key=True)
 
    repository_path = Column('repository_path', Text)
 
    version = Column('version', Integer)
rhodecode/lib/helpers.py
Show inline comments
 
"""Helper functions
 

	
 
Consists of functions typically used within templates, but also
 
available to Controllers. This module is available to both as 'h'.
 
"""
 
import random
 
import hashlib
 
import StringIO
 
import urllib
 
import math
 
import logging
 

	
 
from datetime import datetime
 
from pygments.formatters.html import HtmlFormatter
 
from pygments import highlight as code_highlight
 
from pylons import url, request, config
 
from pylons.i18n.translation import _, ungettext
 
from hashlib import md5
 

	
 
from webhelpers.html import literal, HTML, escape
 
from webhelpers.html.tools import *
 
from webhelpers.html.builder import make_tag
 
from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
 
    end_form, file, form, hidden, image, javascript_link, link_to, \
 
    link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
 
    submit, text, password, textarea, title, ul, xml_declaration, radio
 
from webhelpers.html.tools import auto_link, button_to, highlight, \
 
    js_obfuscate, mail_to, strip_links, strip_tags, tag_re
 
from webhelpers.number import format_byte_size, format_bit_size
 
from webhelpers.pylonslib import Flash as _Flash
 
from webhelpers.pylonslib.secure_form import secure_form
 
from webhelpers.text import chop_at, collapse, convert_accented_entities, \
 
    convert_misc_entities, lchop, plural, rchop, remove_formatting, \
 
    replace_whitespace, urlify, truncate, wrap_paragraphs
 
from webhelpers.date import time_ago_in_words
 
from webhelpers.paginate import Page
 
from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
 
    convert_boolean_attrs, NotGiven, _make_safe_id_component
 

	
 
from rhodecode.lib.annotate import annotate_highlight
 
from rhodecode.lib.utils import repo_name_slug
 
from rhodecode.lib import str2bool, safe_unicode, safe_str, get_changeset_safe
 
from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
 
    get_changeset_safe
 
from rhodecode.lib.markup_renderer import MarkupRenderer
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
 
    """
 
    Reset button
 
    """
 
    _set_input_attrs(attrs, type, name, value)
 
    _set_id_attr(attrs, id, name)
 
    convert_boolean_attrs(attrs, ["disabled"])
 
    return HTML.input(**attrs)
 

	
 
reset = _reset
 
safeid = _make_safe_id_component
 

	
 

	
 
def FID(raw_id, path):
 
    """
 
    Creates a unique ID for a filenode based on a hash of its path and revision;
 
    it's safe to use in URLs
 

	
 
    :param raw_id:
 
    :param path:
 
    """
 

	
 
    return 'C-%s-%s' % (short_id(raw_id), md5(path).hexdigest()[:12])
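
# Standalone example with assumed values (raw_id and path are made up, not
# from this changeset): FID() combines the short changeset id with the
# first 12 hex characters of the path's md5.
from hashlib import md5
_raw_id, _path = 'a' * 40, 'docs/index.rst'
print 'C-%s-%s' % (_raw_id[:12], md5(_path).hexdigest()[:12])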
 

	
 

	
 
def get_token():
 
    """Return the current authentication token, creating one if one doesn't
 
    already exist.
 
    """
 
    token_key = "_authentication_token"
 
    from pylons import session
 
    if not token_key in session:
 
        try:
 
            token = hashlib.sha1(str(random.getrandbits(128))).hexdigest()
 
        except AttributeError: # Python < 2.4
 
            token = hashlib.sha1(str(random.randrange(2 ** 128))).hexdigest()
 
        session[token_key] = token
 
        if hasattr(session, 'save'):
 
            session.save()
 
    return session[token_key]
 

	
 
class _GetError(object):
 
    """Get error from form_errors, and represent it as span wrapped error
 
    message
 

	
 
    :param field_name: field to fetch errors for
 
    :param form_errors: form errors dict
 
    """
 

	
 
    def __call__(self, field_name, form_errors):
 
        tmpl = """<span class="error_msg">%s</span>"""
 
        if form_errors and field_name in form_errors:
 
            return literal(tmpl % form_errors.get(field_name))
 

	
 
get_error = _GetError()
 

	
 
class _ToolTip(object):
 

	
 
    def __call__(self, tooltip_title, trim_at=50):
 
        """Special function just to wrap our text into nice formatted
 
        autowrapped text
 

	
 
        :param tooltip_title:
 
        """
 
        return escape(tooltip_title)
 
tooltip = _ToolTip()
 

	
 
class _FilesBreadCrumbs(object):
 

	
 
    def __call__(self, repo_name, rev, paths):
 
        if isinstance(paths, str):
 
            paths = safe_unicode(paths)
 
        url_l = [link_to(repo_name, url('files_home',
 
                                        repo_name=repo_name,
 
                                        revision=rev, f_path=''))]
 
        paths_l = paths.split('/')
 
        for cnt, p in enumerate(paths_l):
 
            if p != '':
 
                url_l.append(link_to(p,
 
                                     url('files_home',
 
                                         repo_name=repo_name,
 
                                         revision=rev,
 
                                         f_path='/'.join(paths_l[:cnt + 1])
 
                                         )
 
                                     )
 
                             )
 

	
 
        return literal('/'.join(url_l))
 

	
 
files_breadcrumbs = _FilesBreadCrumbs()
 

	
 
class CodeHtmlFormatter(HtmlFormatter):
 
    """My code Html Formatter for source codes
 
    """
 

	
 
    def wrap(self, source, outfile):
 
        return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
 

	
 
    def _wrap_code(self, source):
 
        for cnt, it in enumerate(source):
 
            i, t = it
 
            t = '<div id="L%s">%s</div>' % (cnt + 1, t)
 
            yield i, t
 

	
 
    def _wrap_tablelinenos(self, inner):
 
        dummyoutfile = StringIO.StringIO()
 
        lncount = 0
 
        for t, line in inner:
 
            if t:
 
                lncount += 1
 
            dummyoutfile.write(line)
 

	
 
        fl = self.linenostart
 
        mw = len(str(lncount + fl - 1))
 
        sp = self.linenospecial
 
        st = self.linenostep
 
        la = self.lineanchors
 
        aln = self.anchorlinenos
 
        nocls = self.noclasses
 
        if sp:
 
            lines = []
 

	
 
            for i in range(fl, fl + lncount):
 
                if i % st == 0:
 
                    if i % sp == 0:
 
                        if aln:
 
                            lines.append('<a href="#%s%d" class="special">%*d</a>' %
 
                                         (la, i, mw, i))
 
                        else:
 
                            lines.append('<span class="special">%*d</span>' % (mw, i))
 
                    else:
 
                        if aln:
 
                            lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
 
                        else:
 
                            lines.append('%*d' % (mw, i))
 
                else:
 
                    lines.append('')
 
            ls = '\n'.join(lines)
 
        else:
 
            lines = []
 
            for i in range(fl, fl + lncount):
 
                if i % st == 0:
 
                    if aln:
 
                        lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
 
                    else:
 
                        lines.append('%*d' % (mw, i))
 
                else:
 
                    lines.append('')
 
            ls = '\n'.join(lines)
 

	
 
        # in case you wonder about the seemingly redundant <div> here: since the
 
        # content in the other cell also is wrapped in a div, some browsers in
 
        # some configurations seem to mess up the formatting...
 
        if nocls:
 
            yield 0, ('<table class="%stable">' % self.cssclass +
 
                      '<tr><td><div class="linenodiv" '
 
                      'style="background-color: #f0f0f0; padding-right: 10px">'
 
                      '<pre style="line-height: 125%">' +
 
                      ls + '</pre></div></td><td id="hlcode" class="code">')
 
        else:
 
            yield 0, ('<table class="%stable">' % self.cssclass +
 
                      '<tr><td class="linenos"><div class="linenodiv"><pre>' +
 
                      ls + '</pre></div></td><td id="hlcode" class="code">')
 
        yield 0, dummyoutfile.getvalue()
 
        yield 0, '</td></tr></table>'
 

	
 

	
 
def pygmentize(filenode, **kwargs):
 
    """pygmentize function using pygments
 

	
 
    :param filenode:
 
    """
 

	
 
    return literal(code_highlight(filenode.content,
 
                                  filenode.lexer, CodeHtmlFormatter(**kwargs)))
 

	
 

	
 
def pygmentize_annotation(repo_name, filenode, **kwargs):
 
    """
 
    pygmentize function for annotation
 

	
 
    :param filenode:
 
    """
 

	
 
    color_dict = {}
 

	
 
    def gen_color(n=10000):
 
        """generator for getting n of evenly distributed colors using
 
        hsv color and golden ratio. It always return same order of colors
 

	
 
        :returns: RGB tuple
 
        """
 

	
 
        def hsv_to_rgb(h, s, v):
 
            if s == 0.0:
 
                return v, v, v
 
            i = int(h * 6.0)  # XXX assume int() truncates!
 
            f = (h * 6.0) - i
 
            p = v * (1.0 - s)
 
            q = v * (1.0 - s * f)
 
            t = v * (1.0 - s * (1.0 - f))
 
            i = i % 6
 
            if i == 0:
 
                return v, t, p
 
            if i == 1:
 
                return q, v, p
 
            if i == 2:
 
                return p, v, t
 
            if i == 3:
 
                return p, q, v
 
            if i == 4:
 
                return t, p, v
 
            if i == 5:
 
                return v, p, q
 

	
 
        golden_ratio = 0.618033988749895
 
        h = 0.22717784590367374
 

	
 
        for _ in xrange(n):
 
            h += golden_ratio
 
            h %= 1
 
            HSV_tuple = [h, 0.95, 0.95]
 
            RGB_tuple = hsv_to_rgb(*HSV_tuple)
 
            yield map(lambda x: str(int(x * 256)), RGB_tuple)
 

	
 
    cgenerator = gen_color()
 

	
 
    def get_color_string(cs):
 
        if cs in color_dict:
 
            col = color_dict[cs]
 
        else:
 
            col = color_dict[cs] = cgenerator.next()
 
        return "color: rgb(%s)! important;" % (', '.join(col))
 

	
 
    def url_func(repo_name):
 

	
 
        def _url_func(changeset):
 
            author = changeset.author
 
            date = changeset.date
 
            message = tooltip(changeset.message)
 

	
 
            tooltip_html = ("<div style='font-size:0.8em'><b>Author:</b>"
 
                            " %s<br/><b>Date:</b> %s</b><br/><b>Message:"
 
                            "</b> %s<br/></div>")
 

	
 
            tooltip_html = tooltip_html % (author, date, message)
 
            lnk_format = '%5s:%s' % ('r%s' % changeset.revision,
 
                                     short_id(changeset.raw_id))
 
            uri = link_to(
 
                    lnk_format,
 
                    url('changeset_home', repo_name=repo_name,
 
                        revision=changeset.raw_id),
 
                    style=get_color_string(changeset.raw_id),
 
                    class_='tooltip',
 
                    title=tooltip_html
 
                  )
 

	
 
            uri += '\n'
 
            return uri
 
        return _url_func
 

	
 
    return literal(annotate_highlight(filenode, url_func(repo_name), **kwargs))
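
# Minimal sketch of the hue stepping used by gen_color() above: repeatedly
# adding the golden ratio (mod 1) to the hue spreads the colors evenly and
# always yields the same sequence. The values below are illustrative only.
_golden_ratio = 0.618033988749895
_h = 0.22717784590367374
_hues = []
for _ in xrange(5):
    _h = (_h + _golden_ratio) % 1
    _hues.append(round(_h, 3))
# _hues -> [0.845, 0.463, 0.081, 0.699, 0.317]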
 

	
 

	
 
def is_following_repo(repo_name, user_id):
 
    from rhodecode.model.scm import ScmModel
 
    return ScmModel().is_following_repo(repo_name, user_id)
 

	
 
flash = _Flash()
 

	
 
#==============================================================================
 
# SCM FILTERS available via h.
 
#==============================================================================
 
from rhodecode.lib.vcs.utils import author_name, author_email
 
from rhodecode.lib import credentials_filter, age as _age
 
from rhodecode.lib.utils2 import credentials_filter, age as _age
 
from rhodecode.model.db import User
 

	
 
age = lambda  x: _age(x)
 
capitalize = lambda x: x.capitalize()
 
email = author_email
 
short_id = lambda x: x[:12]
 
hide_credentials = lambda x: ''.join(credentials_filter(x))
 

	
 

	
 
def is_git(repository):
 
    if hasattr(repository, 'alias'):
 
        _type = repository.alias
 
    elif hasattr(repository, 'repo_type'):
 
        _type = repository.repo_type
 
    else:
 
        _type = repository
 
    return _type == 'git'
 

	
 

	
 
def is_hg(repository):
 
    if hasattr(repository, 'alias'):
 
        _type = repository.alias
 
    elif hasattr(repository, 'repo_type'):
 
        _type = repository.repo_type
 
    else:
 
        _type = repository
 
    return _type == 'hg'
 

	
 

	
 
def email_or_none(author):
 
    _email = email(author)
 
    if _email != '':
 
        return _email
 

	
 
    # See if it contains a username we can get an email from
 
    user = User.get_by_username(author_name(author), case_insensitive=True,
 
                                cache=True)
 
    if user is not None:
 
        return user.email
 

	
 
    # No valid email, not a valid user in the system, none!
 
    return None
 

	
 

	
 
def person(author):
 
    # attr to return from fetched user
 
    person_getter = lambda usr: usr.username
 

	
 
    # Valid email in the attribute passed, see if they're in the system
 
    _email = email(author)
 
    if _email != '':
 
        user = User.get_by_email(_email, case_insensitive=True, cache=True)
 
        if user is not None:
 
            return person_getter(user)
 
        return _email
 

	
 
    # Maybe it's a username?
 
    _author = author_name(author)
 
    user = User.get_by_username(_author, case_insensitive=True,
 
                                cache=True)
 
    if user is not None:
 
        return person_getter(user)
 

	
 
    # Still nothing?  Just pass back the author name then
 
    return _author
 

	
 

	
 
def bool2icon(value):
 
    """Returns True/False values represented as small html image of true/false
 
    icons
 

	
 
    :param value: bool value
 
    """
 

	
 
    if value is True:
 
        return HTML.tag('img', src=url("/images/icons/accept.png"),
 
                        alt=_('True'))
 

	
 
    if value is False:
 
        return HTML.tag('img', src=url("/images/icons/cancel.png"),
 
                        alt=_('False'))
 

	
 
    return value
 

	
 

	
 
def action_parser(user_log, feed=False):
 
    """
 
    This helper maps the specified action string into translated
 
    fancy names with icons and links
 

	
 
    :param user_log: user log instance
 
    :param feed: use output for feeds (no html and fancy icons)
 
    """
 

	
 
    action = user_log.action
 
    action_params = ' '
 

	
 
    x = action.split(':')
 

	
 
    if len(x) > 1:
 
        action, action_params = x
 

	
 
    def get_cs_links():
 
        revs_limit = 3  # display this amount always
 
        revs_top_limit = 50  # show up to this amount of hidden changesets
 
        revs_ids = action_params.split(',')
 
        deleted = user_log.repository is None
 
        if deleted:
 
            return ','.join(revs_ids)
 

	
 
        repo_name = user_log.repository.repo_name
 

	
 
        repo = user_log.repository.scm_instance
 

	
 
        message = lambda rev: rev.message
 
        lnk = lambda rev, repo_name: (
 
            link_to('r%s:%s' % (rev.revision, rev.short_id),
 
                    url('changeset_home', repo_name=repo_name,
 
                        revision=rev.raw_id),
 
                    title=tooltip(message(rev)), class_='tooltip')
 
        )
 
        # get at most revs_top_limit changesets, for performance/ui reasons
 
        revs = [
 
            x for x in repo.get_changesets(revs_ids[0],
 
                                           revs_ids[:revs_top_limit][-1])
 
        ]
 

	
 
        cs_links = []
 
        cs_links.append(" " + ', '.join(
 
            [lnk(rev, repo_name) for rev in revs[:revs_limit]]
 
            )
 
        )
 

	
 
        compare_view = (
 
            ' <div class="compare_view tooltip" title="%s">'
 
            '<a href="%s">%s</a> </div>' % (
 
                _('Show all combined changesets %s->%s') % (
 
                    revs_ids[0], revs_ids[-1]
 
                ),
 
                url('changeset_home', repo_name=repo_name,
 
                    revision='%s...%s' % (revs_ids[0], revs_ids[-1])
 
                ),
 
                _('compare view')
 
            )
 
        )
 

	
 
        # if we have exactly one more than normally displayed
 
        # just display it, takes less space than displaying
 
        # "and 1 more revisions"
 
        if len(revs_ids) == revs_limit + 1:
 
            rev = revs[revs_limit]
 
            cs_links.append(", " + lnk(rev, repo_name))
 

	
 
        # hidden-by-default ones
 
        if len(revs_ids) > revs_limit + 1:
 
            uniq_id = revs_ids[0]
 
            html_tmpl = (
 
                '<span> %s <a class="show_more" id="_%s" '
 
                'href="#more">%s</a> %s</span>'
 
            )
 
            if not feed:
 
                cs_links.append(html_tmpl % (
 
                      _('and'),
 
                      uniq_id, _('%s more') % (len(revs_ids) - revs_limit),
 
                      _('revisions')
 
                    )
 
                )
 

	
 
            if not feed:
 
                html_tmpl = '<span id="%s" style="display:none">, %s </span>'
 
            else:
 
                html_tmpl = '<span id="%s"> %s </span>'
 

	
 
            morelinks = ', '.join(
 
              [lnk(rev, repo_name) for rev in revs[revs_limit:]]
 
            )
 

	
 
            if len(revs_ids) > revs_top_limit:
 
                morelinks += ', ...'
 

	
 
            cs_links.append(html_tmpl % (uniq_id, morelinks))
 
        if len(revs) > 1:
 
            cs_links.append(compare_view)
 
        return ''.join(cs_links)
 

	
 
    def get_fork_name():
 
        repo_name = action_params
 
        return _('fork name ') + str(link_to(action_params, url('summary_home',
 
                                          repo_name=repo_name,)))
 

	
 
    action_map = {'user_deleted_repo': (_('[deleted] repository'), None),
 
           'user_created_repo': (_('[created] repository'), None),
 
           'user_created_fork': (_('[created] repository as fork'), None),
 
           'user_forked_repo': (_('[forked] repository'), get_fork_name),
 
           'user_updated_repo': (_('[updated] repository'), None),
 
           'admin_deleted_repo': (_('[delete] repository'), None),
 
           'admin_created_repo': (_('[created] repository'), None),
 
           'admin_forked_repo': (_('[forked] repository'), None),
 
           'admin_updated_repo': (_('[updated] repository'), None),
 
           'push': (_('[pushed] into'), get_cs_links),
 
           'push_local': (_('[committed via RhodeCode] into'), get_cs_links),
 
           'push_remote': (_('[pulled from remote] into'), get_cs_links),
 
           'pull': (_('[pulled] from'), None),
 
           'started_following_repo': (_('[started following] repository'), None),
 
           'stopped_following_repo': (_('[stopped following] repository'), None),
 
            }
 

	
 
    action_str = action_map.get(action, action)
 
    if feed:
 
        action = action_str[0].replace('[', '').replace(']', '')
 
    else:
 
        action = action_str[0]\
 
            .replace('[', '<span class="journal_highlight">')\
 
            .replace(']', '</span>')
 

	
 
    action_params_func = lambda: ""
 

	
 
    if callable(action_str[1]):
 
        action_params_func = action_str[1]
 

	
 
    return [literal(action), action_params_func]
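
# Hedged example of the split performed by action_parser() above; the
# journal entry string is made up, not taken from this changeset.
_raw = 'push:abc123,def456'
_action, _params = _raw.split(':')
print _action, _params.split(',')   # push ['abc123', 'def456']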
 

	
 

	
 
def action_parser_icon(user_log):
 
    action = user_log.action
 
    action_params = None
 
    x = action.split(':')
 

	
 
    if len(x) > 1:
 
        action, action_params = x
 

	
 
    tmpl = """<img src="%s%s" alt="%s"/>"""
 
    map = {'user_deleted_repo':'database_delete.png',
 
           'user_created_repo':'database_add.png',
 
           'user_created_fork':'arrow_divide.png',
 
           'user_forked_repo':'arrow_divide.png',
 
           'user_updated_repo':'database_edit.png',
 
           'admin_deleted_repo':'database_delete.png',
 
           'admin_created_repo':'database_add.png',
 
           'admin_forked_repo':'arrow_divide.png',
 
           'admin_updated_repo':'database_edit.png',
 
           'push':'script_add.png',
 
           'push_local':'script_edit.png',
 
           'push_remote':'connect.png',
 
           'pull':'down_16.png',
 
           'started_following_repo':'heart_add.png',
 
           'stopped_following_repo':'heart_delete.png',
 
            }
 
    return literal(tmpl % ((url('/images/icons/')),
 
                           map.get(action, action), action))
 

	
 

	
 
#==============================================================================
 
# PERMS
 
#==============================================================================
 
from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
 
HasRepoPermissionAny, HasRepoPermissionAll
 

	
 

	
 
#==============================================================================
 
# GRAVATAR URL
 
#==============================================================================
 

	
 
def gravatar_url(email_address, size=30):
 
    if (not str2bool(config['app_conf'].get('use_gravatar')) or
 
        not email_address or email_address == 'anonymous@rhodecode.org'):
 
        f = lambda a, l: min(l, key=lambda x: abs(x - a))
 
        return url("/images/user%s.png" % f(size, [14, 16, 20, 24, 30]))
 

	
 
    ssl_enabled = 'https' == request.environ.get('wsgi.url_scheme')
 
    default = 'identicon'
 
    baseurl_nossl = "http://www.gravatar.com/avatar/"
 
    baseurl_ssl = "https://secure.gravatar.com/avatar/"
 
    baseurl = baseurl_ssl if ssl_enabled else baseurl_nossl
 

	
 
    if isinstance(email_address, unicode):
 
        #hashlib crashes on unicode items
 
        email_address = safe_str(email_address)
 
    # construct the url
 
    gravatar_url = baseurl + hashlib.md5(email_address.lower()).hexdigest() + "?"
 
    gravatar_url += urllib.urlencode({'d': default, 's': str(size)})
 

	
 
    return gravatar_url
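
# Standalone sketch of the URL built above; the address and size are
# made-up example values, not data from this changeset.
import hashlib
import urllib
_email, _size = 'john@example.com', 30
print ("https://secure.gravatar.com/avatar/"
       + hashlib.md5(_email.lower()).hexdigest() + "?"
       + urllib.urlencode({'d': 'identicon', 's': str(_size)}))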
 

	
 

	
 
#==============================================================================
 
# REPO PAGER, PAGER FOR REPOSITORY
 
#==============================================================================
 
class RepoPage(Page):
 

	
 
    def __init__(self, collection, page=1, items_per_page=20,
 
                 item_count=None, url=None, **kwargs):
 

	
 
        """Create a "RepoPage" instance. special pager for paging
 
        repository
 
        """
 
        self._url_generator = url
 

	
 
        # Save the kwargs class-wide so they can be used in the pager() method
 
        self.kwargs = kwargs
 

	
 
        # Save a reference to the collection
 
        self.original_collection = collection
 

	
 
        self.collection = collection
 

	
 
        # The self.page is the number of the current page.
 
        # The first page has the number 1!
 
        try:
 
            self.page = int(page)  # make it int() if we get it as a string
 
        except (ValueError, TypeError):
 
            self.page = 1
 

	
 
        self.items_per_page = items_per_page
 

	
 
        # Unless the user tells us how many items the collections has
 
        # we calculate that ourselves.
 
        if item_count is not None:
 
            self.item_count = item_count
 
        else:
 
            self.item_count = len(self.collection)
 

	
 
        # Compute the number of the first and last available page
 
        if self.item_count > 0:
 
            self.first_page = 1
 
            self.page_count = int(math.ceil(float(self.item_count) /
 
                                            self.items_per_page))
 
            self.last_page = self.first_page + self.page_count - 1
 

	
 
            # Make sure that the requested page number is in the range of
 
            # valid pages
 
            if self.page > self.last_page:
 
                self.page = self.last_page
 
            elif self.page < self.first_page:
 
                self.page = self.first_page
 

	
 
            # Note: the number of items on this page can be less than
 
            #       items_per_page if the last page is not full
 
            self.first_item = max(0, (self.item_count) - (self.page *
 
                                                          items_per_page))
 
            self.last_item = ((self.item_count - 1) - items_per_page *
 
                              (self.page - 1))
 

	
 
            self.items = list(self.collection[self.first_item:self.last_item + 1])
 

	
 
            # Links to previous and next page
 
            if self.page > self.first_page:
 
                self.previous_page = self.page - 1
 
            else:
 
                self.previous_page = None
 

	
 
            if self.page < self.last_page:
 
                self.next_page = self.page + 1
 
            else:
 
                self.next_page = None
 

	
 
        # No items available
 
        else:
 
            self.first_page = None
 
            self.page_count = 0
 
            self.last_page = None
 
            self.first_item = None
 
            self.last_item = None
 
            self.previous_page = None
 
            self.next_page = None
 
            self.items = []
 

	
 
        # This is a subclass of the 'list' type. Initialise the list now.
 
        list.__init__(self, reversed(self.items))
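
# Worked example with assumed numbers (not from the changeset): 45 items,
# 20 per page, requesting page 1 of a collection shown newest-first.
import math
_item_count, _per_page, _page = 45, 20, 1
_page_count = int(math.ceil(float(_item_count) / _per_page))   # 3
_first = max(0, _item_count - _page * _per_page)               # 25
_last = (_item_count - 1) - _per_page * (_page - 1)            # 44
# page 1 therefore renders collection[25:45], reversed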
 

	
 

	
 
def changed_tooltip(nodes):
 
    """
 
    Generates an html string for changed nodes on the changeset page.
 
    It limits the output to 30 entries
 

	
 
    :param nodes: LazyNodesGenerator
 
    """
 
    if nodes:
 
        pref = ': <br/> '
 
        suf = ''
 
        if len(nodes) > 30:
 
            suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
 
        return literal(pref + '<br/> '.join([safe_unicode(x.path)
rhodecode/lib/hooks.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.hooks
 
    ~~~~~~~~~~~~~~~~~~~
 

	
 
    Hooks run by RhodeCode
 

	
 
    :created_on: Aug 6, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
import os
 
import sys
 

	
 
from mercurial.scmutil import revrange
 
from mercurial.node import nullrev
 
from rhodecode import EXTENSIONS
 
from rhodecode.lib import helpers as h
 
from rhodecode.lib.utils import action_logger
 
from inspect import isfunction
 

	
 

	
 
def repo_size(ui, repo, hooktype=None, **kwargs):
 
    """
 
    Presents the size of the repository after a push
 

	
 
    :param ui:
 
    :param repo:
 
    :param hooktype:
 
    """
 

	
 
    size_hg, size_root = 0, 0
 
    for path, dirs, files in os.walk(repo.root):
 
        if path.find('.hg') != -1:
 
            for f in files:
 
                try:
 
                    size_hg += os.path.getsize(os.path.join(path, f))
 
                except OSError:
 
                    pass
 
        else:
 
            for f in files:
 
                try:
 
                    size_root += os.path.getsize(os.path.join(path, f))
 
                except OSError:
 
                    pass
 

	
 
    size_hg_f = h.format_byte_size(size_hg)
 
    size_root_f = h.format_byte_size(size_root)
 
    size_total_f = h.format_byte_size(size_root + size_hg)
 

	
 
    last_cs = repo[len(repo) - 1]
 

	
 
    msg = ('Repository size .hg:%s repo:%s total:%s\n'
 
           'Last revision is now r%s:%s\n') % (
 
        size_hg_f, size_root_f, size_total_f, last_cs.rev(), last_cs.hex()[:12]
 
    )
 

	
 
    sys.stdout.write(msg)
 

	
 

	
 
def log_pull_action(ui, repo, **kwargs):
 
    """
 
    Logs the user's last pull action
 

	
 
    :param ui:
 
    :param repo:
 
    """
 

	
 
    extras = dict(repo.ui.configitems('rhodecode_extras'))
 
    username = extras['username']
 
    repository = extras['repository']
 
    action = 'pull'
 

	
 
    action_logger(username, action, repository, extras['ip'], commit=True)
 
    # extension hook call
 
    callback = getattr(EXTENSIONS, 'PULL_HOOK', None)
 

	
 
    if isfunction(callback):
 
        kw = {}
 
        kw.update(extras)
 
        callback(**kw)
 
    return 0
 

	
 

	
 
def log_push_action(ui, repo, **kwargs):
 
    """
 
    Maps the user's last push action to the new changeset ids, from mercurial
 

	
 
    :param ui:
 
    :param repo:
 
    """
 

	
 
    extras = dict(repo.ui.configitems('rhodecode_extras'))
 
    username = extras['username']
 
    repository = extras['repository']
 
    action = extras['action'] + ':%s'
 
    node = kwargs['node']
 

	
 
    def get_revs(repo, rev_opt):
 
        if rev_opt:
 
            revs = revrange(repo, rev_opt)
 

	
 
            if len(revs) == 0:
 
                return (nullrev, nullrev)
 
            return (max(revs), min(revs))
 
        else:
 
            return (len(repo) - 1, 0)
 

	
 
    stop, start = get_revs(repo, [node + ':'])
 

	
 
    revs = (str(repo[r]) for r in xrange(start, stop + 1))
 

	
 
    action = action % ','.join(revs)
 

	
 
    action_logger(username, action, repository, extras['ip'], commit=True)
 

	
 
    # extension hook call
 
    callback = getattr(EXTENSIONS, 'PUSH_HOOK', None)
 
    if isfunction(callback):
 
        kw = {'pushed_revs': revs}
 
        kw.update(extras)
 
        callback(**kw)
 
    return 0
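
# Minimal sketch of how the journal action string above is assembled once
# get_revs() has resolved the pushed range; plain revision numbers stand in
# for the changeset hashes that str(repo[r]) would produce.
_start, _stop = 10, 12
_revs = [str(r) for r in xrange(_start, _stop + 1)]
print 'push:%s' % ','.join(_revs)   # push:10,11,12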
 

	
 

	
 
def log_create_repository(repository_dict, created_by, **kwargs):
 
    """
 
    Post create repository Hook. This is a dummy function for admins to re-use
 
    if needed. It's taken from rhodecode-extensions module and executed 
 
    if needed. It's taken from rhodecode-extensions module and executed
 
    if present
 

	
 
    :param repository: dict dump of repository object
 
    :param created_by: username who created repository
 
    :param created_date: date of creation
 

	
 
    available keys of repository_dict:
 

	
 
     'repo_type',
 
     'description',
 
     'private',
 
     'created_on',
 
     'enable_downloads',
 
     'repo_id',
 
     'user_id',
 
     'enable_statistics',
 
     'clone_uri',
 
     'fork_id',
 
     'group_id',
 
     'repo_name'
 

	
 
    """
 

	
 
    callback = getattr(EXTENSIONS, 'CREATE_REPO_HOOK', None)
 
    if isfunction(callback):
 
        kw = {}
 
        kw.update(repository_dict)
 
        kw.update({'created_by': created_by})
 
        kw.update(kwargs)
 
        return callback(**kw)
 

	
 
    return 0
rhodecode/lib/indexers/__init__.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.indexers.__init__
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    Whoosh indexing module for RhodeCode
 

	
 
    :created_on: Aug 17, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
import os
 
import sys
 
import traceback
 
import logging
 
from os.path import dirname as dn, join as jn
 

	
 
#to get the rhodecode import
 
sys.path.append(dn(dn(dn(os.path.realpath(__file__)))))
 

	
 
from string import strip
 
from shutil import rmtree
 

	
 
from whoosh.analysis import RegexTokenizer, LowercaseFilter, StopFilter
 
from whoosh.fields import TEXT, ID, STORED, Schema, FieldType
 
from whoosh.index import create_in, open_dir
 
from whoosh.formats import Characters
 
from whoosh.highlight import highlight, HtmlFormatter, ContextFragmenter
 

	
 
from webhelpers.html.builder import escape
 
from sqlalchemy import engine_from_config
 

	
 
from rhodecode.model import init_model
 
from rhodecode.model.scm import ScmModel
 
from rhodecode.model.repo import RepoModel
 
from rhodecode.config.environment import load_environment
 
from rhodecode.lib import LANGUAGES_EXTENSIONS_MAP, INDEX_EXTENSIONS, \
 
    LazyProperty
 
from rhodecode.lib.utils import BasePasterCommand, Command, add_cache
 
from rhodecode.lib.utils2 import LazyProperty
 
from rhodecode.lib.utils import BasePasterCommand, Command, add_cache,\
 
    load_rcextensions
 

	
 
# CUSTOM ANALYZER wordsplit + lowercase filter
 
ANALYZER = RegexTokenizer(expression=r"\w+") | LowercaseFilter()
 

	
 

	
 
#INDEX SCHEMA DEFINITION
 
SCHEMA = Schema(
 
    owner=TEXT(),
 
    repository=TEXT(stored=True),
 
    path=TEXT(stored=True),
 
    content=FieldType(format=Characters(), analyzer=ANALYZER,
 
                      scorable=True, stored=True),
 
    modtime=STORED(),
 
    extension=TEXT(stored=True)
 
)
 

	
 
IDX_NAME = 'HG_INDEX'
 
FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
 
FRAGMENTER = ContextFragmenter(200)
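
# Hedged usage sketch, not part of this changeset: how a document could be
# written into an index built from the SCHEMA above. The directory, repo
# name and field values are made-up examples.
import os
from whoosh.index import create_in

_idx_dir = '/tmp/whoosh-example'
if not os.path.exists(_idx_dir):
    os.makedirs(_idx_dir)
_ix = create_in(_idx_dir, SCHEMA, indexname=IDX_NAME)
_writer = _ix.writer()
_writer.add_document(owner=u'marcink', repository=u'demo-repo',
                     path=u'demo-repo/setup.py',
                     content=u'from setuptools import setup',
                     modtime=0, extension=u'py')
_writer.commit()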
 

	
 

	
 
class MakeIndex(BasePasterCommand):
 

	
 
    max_args = 1
 
    min_args = 1
 

	
 
    usage = "CONFIG_FILE"
 
    summary = "Creates index for full text search given configuration file"
 
    group_name = "RhodeCode"
 
    takes_config_file = -1
 
    parser = Command.standard_parser(verbose=True)
 

	
 
    def command(self):
 
        logging.config.fileConfig(self.path_to_ini_file)
 
        from pylons import config
 
        add_cache(config)
 
        engine = engine_from_config(config, 'sqlalchemy.db1.')
 
        init_model(engine)
 

	
 
        index_location = config['index_dir']
 
        repo_location = self.options.repo_location \
 
            if self.options.repo_location else RepoModel().repos_path
 
        repo_list = map(strip, self.options.repo_list.split(',')) \
 
            if self.options.repo_list else None
 

	
 
        load_rcextensions(config['here'])
 
        #======================================================================
 
        # WHOOSH DAEMON
 
        #======================================================================
 
        from rhodecode.lib.pidlock import LockHeld, DaemonLock
 
        from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
 
        try:
 
            l = DaemonLock(file_=jn(dn(dn(index_location)), 'make_index.lock'))
 
            WhooshIndexingDaemon(index_location=index_location,
 
                                 repo_location=repo_location,
 
                                 repo_list=repo_list)\
 
                                 repo_list=repo_list,)\
 
                .run(full_index=self.options.full_index)
 
            l.release()
 
        except LockHeld:
 
            sys.exit(1)
 

	
 
    def update_parser(self):
 
        self.parser.add_option('--repo-location',
 
                          action='store',
 
                          dest='repo_location',
 
                          help="Specifies repositories location to index OPTIONAL",
 
                          )
 
        self.parser.add_option('--index-only',
 
                          action='store',
 
                          dest='repo_list',
 
                          help="Specifies a comma separated list of repositores "
 
                                "to build index on OPTIONAL",
 
                          )
 
        self.parser.add_option('-f',
 
                          action='store_true',
 
                          dest='full_index',
 
                          help="Specifies that index should be made full i.e"
 
                                " destroy old and build from scratch",
 
                          default=False)
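    # Illustrative invocation (the command name and paths are assumptions based
    # on how this paster command is typically registered, not on this changeset):
    #
    #   paster make-index production.ini --repo-location=/srv/repos \
    #       --index-only=repo1,repo2 -f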
 

	
 

	
 
class ResultWrapper(object):
 
    def __init__(self, search_type, searcher, matcher, highlight_items):
 
        self.search_type = search_type
 
        self.searcher = searcher
 
        self.matcher = matcher
 
        self.highlight_items = highlight_items
 
        self.fragment_size = 200
 

	
 
    @LazyProperty
 
    def doc_ids(self):
 
        docs_id = []
 
        while self.matcher.is_active():
 
            docnum = self.matcher.id()
 
            chunks = [offsets for offsets in self.get_chunks()]
 
            docs_id.append([docnum, chunks])
 
            self.matcher.next()
 
        return docs_id
 

	
 
    def __str__(self):
 
        return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))
 

	
 
    def __repr__(self):
 
        return self.__str__()
 

	
 
    def __len__(self):
 
        return len(self.doc_ids)
 

	
 
    def __iter__(self):
 
        """
 
        Allows iteration over results and lazily generates content
 

	
 
        *Requires* implementation of ``__getitem__`` method.
 
        """
 
        for docid in self.doc_ids:
 
            yield self.get_full_content(docid)
 

	
 
    def __getitem__(self, key):
 
        """
 
        Slicing of ResultWrapper
 
        """
 
        i, j = key.start, key.stop
 

	
 
        slices = []
 
        for docid in self.doc_ids[i:j]:
 
            slices.append(self.get_full_content(docid))
 
        return slices
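        # Note (descriptive only): this handles slice keys (key.start / key.stop);
        # plain integer indexing such as result[0] is not supported here.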
 

	
 
    def get_full_content(self, docid):
 
        res = self.searcher.stored_fields(docid[0])
 
        f_path = res['path'][res['path'].find(res['repository']) \
 
                             + len(res['repository']):].lstrip('/')
 

	
 
        content_short = self.get_short_content(res, docid[1])
 
        res.update({'content_short': content_short,
 
                    'content_short_hl': self.highlight(content_short),
 
                    'f_path': f_path})
 

	
 
        return res
 

	
 
    def get_short_content(self, res, chunks):
 

	
 
        return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
 

	
 
    def get_chunks(self):
 
        """
 
        Chunks the content without overlapping chunks, so the same
 
        nearby occurrences are not highlighted twice.
 
        """
 
        memory = [(0, 0)]
 
        for span in self.matcher.spans():
 
            start = span.startchar or 0
 
            end = span.endchar or 0
 
            start_offseted = max(0, start - self.fragment_size)
 
            end_offseted = end + self.fragment_size
 

	
 
            if start_offseted < memory[-1][1]:
 
                start_offseted = memory[-1][1]
 
            memory.append((start_offseted, end_offseted,))
 
            yield (start_offseted, end_offseted,)
 

	
 
    def highlight(self, content, top=5):
 
        if self.search_type != 'content':
 
            return ''
 
        hl = highlight(
 
            text=escape(content),
 
            terms=self.highlight_items,
 
            analyzer=ANALYZER,
 
            fragmenter=FRAGMENTER,
 
            formatter=FORMATTER,
 
            top=top
 
        )
 
        return hl
rhodecode/lib/indexers/daemon.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.indexers.daemon
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    A daemon that builds and incrementally updates the Whoosh full text search index
 

	
 
    :created_on: Jan 26, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import os
 
import sys
 
import logging
 
import traceback
 

	
 
from shutil import rmtree
 
from time import mktime
 

	
 
from os.path import dirname as dn
 
from os.path import join as jn
 

	
 
#to get the rhodecode import
 
project_path = dn(dn(dn(dn(os.path.realpath(__file__)))))
 
sys.path.append(project_path)
 

	
 

	
 
from rhodecode.config.conf import INDEX_EXTENSIONS
 
from rhodecode.model.scm import ScmModel
 
from rhodecode.lib import safe_unicode
 
from rhodecode.lib.indexers import INDEX_EXTENSIONS, SCHEMA, IDX_NAME
 
from rhodecode.lib.utils2 import safe_unicode
 
from rhodecode.lib.indexers import SCHEMA, IDX_NAME
 

	
 
from rhodecode.lib.vcs.exceptions import ChangesetError, RepositoryError, \
 
    NodeDoesNotExistError
 

	
 
from whoosh.index import create_in, open_dir
 

	
 
log = logging.getLogger('whoosh_indexer')
 

	
 

	
 
class WhooshIndexingDaemon(object):
 
    """
 
    Daemon for atomic jobs
 
    """
 

	
 
    def __init__(self, indexname=IDX_NAME, index_location=None,
 
                 repo_location=None, sa=None, repo_list=None):
 
        self.indexname = indexname
 

	
 
        self.index_location = index_location
 
        if not index_location:
 
            raise Exception('You have to provide index location')
 

	
 
        self.repo_location = repo_location
 
        if not repo_location:
 
            raise Exception('You have to provide repositories location')
 

	
 
        self.repo_paths = ScmModel(sa).repo_scan(self.repo_location)
 

	
 
        if repo_list:
 
            filtered_repo_paths = {}
 
            for repo_name, repo in self.repo_paths.items():
 
                if repo_name in repo_list:
 
                    filtered_repo_paths[repo_name] = repo
 

	
 
            self.repo_paths = filtered_repo_paths
 

	
 
        self.initial = False
 
        if not os.path.isdir(self.index_location):
 
            os.makedirs(self.index_location)
 
            log.info('Cannot run incremental index since it does not'
 
                     ' yet exist - running full build')
 
            self.initial = True
 

	
 
    def get_paths(self, repo):
 
        """
 
        Recursively walk the root dir and return a set of all paths in that dir,
 
        based on the repository walk function
 
        """
 
        index_paths_ = set()
 
        try:
 
            tip = repo.get_changeset('tip')
 
            for topnode, dirs, files in tip.walk('/'):
 
                for f in files:
 
                    index_paths_.add(jn(repo.path, f.path))
 

	
 
        except RepositoryError, e:
 
            log.debug(traceback.format_exc())
 
            pass
 
        return index_paths_
 

	
 
    def get_node(self, repo, path):
 
        n_path = path[len(repo.path) + 1:]
 
        node = repo.get_changeset().get_node(n_path)
 
        return node
 

	
 
    def get_node_mtime(self, node):
 
        return mktime(node.last_changeset.date.timetuple())
 

	
 
    def add_doc(self, writer, path, repo, repo_name):
 
        """
 
        Add a doc to the writer; this function itself fetches data from
 
        the vcs backend instance
 
        """
 

	
 
        node = self.get_node(repo, path)
 

	
 
        indexed = indexed_w_content = 0
 
        # we just index the content of chosen files, and skip binary files
 
        if node.extension in INDEX_EXTENSIONS and not node.is_binary:
 

	
 
            u_content = node.content
 
            if not isinstance(u_content, unicode):
 
                log.warning('  >> %s could not get content as unicode, '
 
                            'replacing with empty content' % path)
 
                u_content = u''
 
            else:
 
                log.debug('    >> %s [WITH CONTENT]' % path)
 
                indexed_w_content += 1
 

	
 
        else:
 
            log.debug('    >> %s' % path)
 
            # just index the file name without its content
 
            u_content = u''
 
            indexed += 1
 

	
 
        writer.add_document(
 
            owner=unicode(repo.contact),
 
            repository=safe_unicode(repo_name),
 
            path=safe_unicode(path),
 
            content=u_content,
 
            modtime=self.get_node_mtime(node),
 
            extension=node.extension
 
        )
 
        return indexed, indexed_w_content
 

	
 
    def build_index(self):
 
        if os.path.exists(self.index_location):
 
            log.debug('removing previous index')
 
            rmtree(self.index_location)
 

	
 
        if not os.path.exists(self.index_location):
 
            os.mkdir(self.index_location)
 

	
 
        idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME)
 
        writer = idx.writer()
 

	
 
        log.debug('BUILDING INDEX FOR EXTENSIONS %s' % INDEX_EXTENSIONS)
 
        for repo_name, repo in self.repo_paths.items():
 
            log.debug('building index @ %s' % repo.path)
 

	
 
            i_cnt = iwc_cnt = 0
 
            for idx_path in self.get_paths(repo):
 
                self.add_doc(writer, idx_path, repo, repo_name)
 
                i, iwc = self.add_doc(writer, idx_path, repo, repo_name)
 
                i_cnt += i
 
                iwc_cnt += iwc
 
            log.debug('added %s files %s with content for repo %s' % (
 
                         i_cnt + iwc_cnt, iwc_cnt, repo.path)
 
            )
 

	
 
        log.debug('>> COMMITTING CHANGES <<')
 
        writer.commit(merge=True)
 
        log.debug('>>> FINISHED BUILDING INDEX <<<')
 

	
 
    def update_index(self):
 
        log.debug('STARTING INCREMENTAL INDEXING UPDATE')
 
        log.debug('STARTING INCREMENTAL INDEXING UPDATE FOR EXTENSIONS %s' %
 
                  INDEX_EXTENSIONS)
 

	
 
        idx = open_dir(self.index_location, indexname=self.indexname)
 
        # The set of all paths in the index
 
        indexed_paths = set()
 
        # The set of all paths we need to re-index
 
        to_index = set()
 

	
 
        reader = idx.reader()
 
        writer = idx.writer()
 

	
 
        # Loop over the stored fields in the index
 
        for fields in reader.all_stored_fields():
 
            indexed_path = fields['path']
 
            indexed_paths.add(indexed_path)
 

	
 
            repo = self.repo_paths[fields['repository']]
 

	
 
            try:
 
                node = self.get_node(repo, indexed_path)
 
            except (ChangesetError, NodeDoesNotExistError):
 
                # This file was deleted since it was indexed
 
                log.debug('removing from index %s' % indexed_path)
 
                writer.delete_by_term('path', indexed_path)
 

	
 
            else:
 
                # Check if this file was changed since it was indexed
 
                indexed_time = fields['modtime']
 
                mtime = self.get_node_mtime(node)
 
                if mtime > indexed_time:
 
                    # The file has changed, delete it and add it to the list of
 
                    # files to reindex
 
                    log.debug('adding to reindex list %s' % indexed_path)
 
                    writer.delete_by_term('path', indexed_path)
 
                    to_index.add(indexed_path)
 

	
 
        # Loop over the files in the filesystem
 
        # Assume we have a function that gathers the filenames of the
 
        # documents to be indexed
 
        ri_cnt = riwc_cnt = 0
 
        for repo_name, repo in self.repo_paths.items():
 
            for path in self.get_paths(repo):
 
                if path in to_index or path not in indexed_paths:
 
                    # This is either a file that's changed, or a new file
 
                    # that wasn't indexed before. So index it!
 
                    self.add_doc(writer, path, repo, repo_name)
 
                    i, iwc = self.add_doc(writer, path, repo, repo_name)
 
                    log.debug('re-indexing %s' % path)
 

	
 
                    ri_cnt += i
 
                    riwc_cnt += iwc
 
        log.debug('added %s files %s with content for repo %s' % (
 
                     ri_cnt + riwc_cnt, riwc_cnt, repo.path)
 
        )
 
        log.debug('>> COMMITTING CHANGES <<')
 
        writer.commit(merge=True)
 
        log.debug('>>> FINISHED REBUILDING INDEX <<<')
 

	
 
    def run(self, full_index=False):
 
        """Run daemon"""
 
        if full_index or self.initial:
 
            self.build_index()
 
        else:
 
            self.update_index()
rhodecode/lib/markup_renderer.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.markup_renderer
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 

	
 
    Renderer for markup languages with ability to parse using rst or markdown
 

	
 
    :created_on: Oct 27, 2011
 
    :author: marcink
 
    :copyright: (C) 2011-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import re
 
import logging
 

	
 
from rhodecode.lib import safe_unicode
 
from rhodecode.lib.utils2 import safe_unicode
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class MarkupRenderer(object):
 
    RESTRUCTUREDTEXT_DISALLOWED_DIRECTIVES = ['include', 'meta', 'raw']
 

	
 
    MARKDOWN_PAT = re.compile(r'md|mkdn?|mdown|markdown', re.IGNORECASE)
 
    RST_PAT = re.compile(r're?st', re.IGNORECASE)
 
    PLAIN_PAT = re.compile(r'readme', re.IGNORECASE)
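    # Illustrative matches (filenames are examples): 'README.md' hits
    # MARKDOWN_PAT, 'docs.rst' hits RST_PAT, and a bare 'readme' hits PLAIN_PAT,
    # which is rendered with the rst renderer below.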
 

	
 
    def __detect_renderer(self, source, filename=None):
 
        """
 
        Detects which renderer should be used for generating html
 
        from a markup language
 

	
 
        filename can also explicitly be a renderer name
 

	
 
        :param source:
 
        :param filename:
 
        """
 

	
 
        if MarkupRenderer.MARKDOWN_PAT.findall(filename):
 
            detected_renderer = 'markdown'
 
        elif MarkupRenderer.RST_PAT.findall(filename):
 
            detected_renderer = 'rst'
 
        elif MarkupRenderer.PLAIN_PAT.findall(filename):
 
            detected_renderer = 'rst'
 
        else:
 
            detected_renderer = 'plain'
 

	
 
        return getattr(MarkupRenderer, detected_renderer)
 

	
 
    def render(self, source, filename=None):
 
        """
 
        Renders the given source using the detected renderer;
 
        renderers are detected based on file extension or mimetype.
 
        As a last resort it falls back to simple html, replacing new lines with <br/>
 

	
 
        :param filename:
 
        :param source:
 
        """
 

	
 
        renderer = self.__detect_renderer(source, filename)
 
        readme_data = renderer(source)
 
        return readme_data
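    # Example (illustrative): MarkupRenderer().render(text, 'README.md') picks
    # the markdown renderer, falling back to plain() if the markdown package is
    # not installed.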
 

	
 
    @classmethod
 
    def plain(cls, source):
 
        source = safe_unicode(source)
 

	
 
        def urlify_text(text):
 
            url_pat = re.compile(r'(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]'
 
                                 '|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)')
 

	
 
            def url_func(match_obj):
 
                url_full = match_obj.groups()[0]
 
                return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
 

	
 
            return url_pat.sub(url_func, text)
 

	
 
        source = urlify_text(source)
 
        return '<br />' + source.replace("\n", '<br />')
 

	
 
    @classmethod
 
    def markdown(cls, source):
 
        source = safe_unicode(source)
 
        try:
 
            import markdown as __markdown
 
            return __markdown.markdown(source, ['codehilite'])
 
        except ImportError:
 
            log.warning('Install markdown to use this function')
 
            return cls.plain(source)
 

	
 
    @classmethod
 
    def rst(cls, source):
 
        source = safe_unicode(source)
 
        try:
 
            from docutils.core import publish_parts
 
            from docutils.parsers.rst import directives
 
            docutils_settings = dict([(alias, None) for alias in
 
                                cls.RESTRUCTUREDTEXT_DISALLOWED_DIRECTIVES])
 

	
 
            docutils_settings.update({'input_encoding': 'unicode',
 
                                      'report_level': 4})
 

	
 
            for k, v in docutils_settings.iteritems():
 
                directives.register_directive(k, v)
 

	
 
            parts = publish_parts(source=source,
 
                                  writer_name="html4css1",
 
                                  settings_overrides=docutils_settings)
 

	
 
            return parts['html_title'] + parts["fragment"]
 
        except ImportError:
 
            log.warning('Install docutils to use this function')
 
            return cls.plain(source)
 

	
 
    @classmethod
 
    def rst_with_mentions(cls, source):
 
        mention_pat = re.compile(r'(?:^@|\s@)(\w+)')
 

	
 
        def wrapp(match_obj):
 
            uname = match_obj.groups()[0]
 
            return ' **@%(uname)s** ' % {'uname':uname}
 
        mention_hl = mention_pat.sub(wrapp, source).strip()
 
        return cls.rst(mention_hl)
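    # Illustrative: rst_with_mentions(u'fixed thanks to @marcink') wraps the
    # mention as **@marcink** before handing the text to rst().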
rhodecode/lib/middleware/https_fixup.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.middleware.https_fixup
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    middleware to handle https correctly
 

	
 
    :created_on: May 23, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
from rhodecode.lib import str2bool
 
from rhodecode.lib.utils2 import str2bool
 

	
 

	
 
class HttpsFixup(object):
 

	
 
    def __init__(self, app, config):
 
        self.application = app
 
        self.config = config
 

	
 
    def __call__(self, environ, start_response):
 
        self.__fixup(environ)
 
        return self.application(environ, start_response)
 

	
 
    def __fixup(self, environ):
 
        """
 
        Function to fix up the environ as needed. In order to use this
 
        middleware you should set one of the forwarded-scheme headers
 
        (X-Url-Scheme, X-Forwarded-Scheme or X-Forwarded-Proto) inside your
 
        proxy, i.e. nginx, apache etc.
 
        """
 

	
 
        if str2bool(self.config.get('force_https')):
 
            proto = 'https'
 
        else:
 
            if 'HTTP_X_URL_SCHEME' in environ:
 
                proto = environ.get('HTTP_X_URL_SCHEME')
 
            elif 'HTTP_X_FORWARDED_SCHEME' in environ:
 
                proto = environ.get('HTTP_X_FORWARDED_SCHEME')
 
            elif 'HTTP_X_FORWARDED_PROTO' in environ:
 
                proto = environ.get('HTTP_X_FORWARDED_PROTO')
 
            else:
 
                proto = 'http'
 
        if proto == 'https':
 
            environ['wsgi.url_scheme'] = proto
 
        else:
 
            environ['wsgi.url_scheme'] = 'http'
 

	
 
        return None
rhodecode/lib/middleware/simplegit.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.middleware.simplegit
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    SimpleGit middleware for handling git protocol requests (push/clone etc.)
 
    It's implemented with a basic auth function
 

	
 
    :created_on: Apr 28, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import os
 
import re
 
import logging
 
import traceback
 

	
 
from dulwich import server as dulserver
 

	
 

	
 
class SimpleGitUploadPackHandler(dulserver.UploadPackHandler):
 

	
 
    def handle(self):
 
        write = lambda x: self.proto.write_sideband(1, x)
 

	
 
        graph_walker = dulserver.ProtocolGraphWalker(self,
 
                                                     self.repo.object_store,
 
                                                     self.repo.get_peeled)
 
        objects_iter = self.repo.fetch_objects(
 
          graph_walker.determine_wants, graph_walker, self.progress,
 
          get_tagged=self.get_tagged)
 

	
 
        # Do they want any objects?
 
        if objects_iter is None or len(objects_iter) == 0:
 
            return
 

	
 
        self.progress("counting objects: %d, done.\n" % len(objects_iter))
 
        dulserver.write_pack_objects(dulserver.ProtocolFile(None, write),
 
                                  objects_iter, len(objects_iter))
 
        messages = []
 
        messages.append('thank you for using rhodecode')
 

	
 
        for msg in messages:
 
            self.progress(msg + "\n")
 
        # we are done
 
        self.proto.write("0000")
 

	
 
dulserver.DEFAULT_HANDLERS = {
 
  'git-upload-pack': SimpleGitUploadPackHandler,
 
  'git-receive-pack': dulserver.ReceivePackHandler,
 
}
 

	
 
from dulwich.repo import Repo
 
from dulwich.web import HTTPGitApplication
 

	
 
from paste.httpheaders import REMOTE_USER, AUTH_TYPE
 

	
 
from rhodecode.lib import safe_str
 
from rhodecode.lib.utils2 import safe_str
 
from rhodecode.lib.base import BaseVCSController
 
from rhodecode.lib.auth import get_container_username
 
from rhodecode.lib.utils import is_valid_repo
 
from rhodecode.model.db import User
 

	
 
from webob.exc import HTTPNotFound, HTTPForbidden, HTTPInternalServerError
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
GIT_PROTO_PAT = re.compile(r'^/(.+)/(info/refs|git-upload-pack|git-receive-pack)')
 

	
 

	
 
def is_git(environ):
 
    path_info = environ['PATH_INFO']
 
    isgit_path = GIT_PROTO_PAT.match(path_info)
 
    log.debug('pathinfo: %s detected as GIT %s' % (
 
        path_info, isgit_path != None)
 
    )
 
    return isgit_path
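
# Illustrative PATH_INFO values matched by GIT_PROTO_PAT:
#   /myrepo/info/refs               -> group(1) == 'myrepo'
#   /group/myrepo/git-upload-pack   -> group(1) == 'group/myrepo'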
 

	
 

	
 
class SimpleGit(BaseVCSController):
 

	
 
    def _handle_request(self, environ, start_response):
 

	
 
        if not is_git(environ):
 
            return self.application(environ, start_response)
 

	
 
        proxy_key = 'HTTP_X_REAL_IP'
 
        def_key = 'REMOTE_ADDR'
 
        ipaddr = environ.get(proxy_key, environ.get(def_key, '0.0.0.0'))
 
        username = None
 
        # skip passing error to error controller
 
        environ['pylons.status_code_redirect'] = True
 

	
 
        #======================================================================
 
        # EXTRACT REPOSITORY NAME FROM ENV
 
        #======================================================================
 
        try:
 
            repo_name = self.__get_repository(environ)
 
            log.debug('Extracted repo name is %s' % repo_name)
 
        except:
 
            return HTTPInternalServerError()(environ, start_response)
 

	
 
        #======================================================================
 
        # GET ACTION PULL or PUSH
 
        #======================================================================
 
        action = self.__get_action(environ)
 

	
 
        #======================================================================
 
        # CHECK ANONYMOUS PERMISSION
 
        #======================================================================
 
        if action in ['pull', 'push']:
 
            anonymous_user = self.__get_user('default')
 
            username = anonymous_user.username
 
            anonymous_perm = self._check_permission(action, anonymous_user,
 
                                                    repo_name)
 

	
 
            if anonymous_perm is not True or anonymous_user.active is False:
 
                if anonymous_perm is not True:
 
                    log.debug('Not enough credentials to access this '
 
                              'repository as anonymous user')
 
                if anonymous_user.active is False:
 
                    log.debug('Anonymous access is disabled, running '
 
                              'authentication')
 
                #==============================================================
 
                # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
 
                # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
 
                #==============================================================
 

	
 
                # Attempting to retrieve username from the container
 
                username = get_container_username(environ, self.config)
 

	
 
                # If not authenticated by the container, running basic auth
 
                if not username:
 
                    self.authenticate.realm = \
 
                        safe_str(self.config['rhodecode_realm'])
 
                    result = self.authenticate(environ)
 
                    if isinstance(result, str):
 
                        AUTH_TYPE.update(environ, 'basic')
 
                        REMOTE_USER.update(environ, result)
 
                        username = result
 
                    else:
 
                        return result.wsgi_application(environ, start_response)
 

	
 
                #==============================================================
 
                # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
 
                #==============================================================
 
                if action in ['pull', 'push']:
 
                    try:
 
                        user = self.__get_user(username)
 
                        if user is None or not user.active:
 
                            return HTTPForbidden()(environ, start_response)
 
                        username = user.username
 
                    except:
 
                        log.error(traceback.format_exc())
 
                        return HTTPInternalServerError()(environ,
 
                                                         start_response)
 

	
 
                    #check permissions for this repository
 
                    perm = self._check_permission(action, user, repo_name)
 
                    if perm is not True:
 
                        return HTTPForbidden()(environ, start_response)
 

	
 
        #===================================================================
 
        # GIT REQUEST HANDLING
 
        #===================================================================
 
        repo_path = os.path.join(safe_str(self.basepath), safe_str(repo_name))
 
        log.debug('Repository path is %s' % repo_path)
 

	
 
        # quick check if that dir exists...
 
        if is_valid_repo(repo_name, self.basepath) is False:
 
            return HTTPNotFound()(environ, start_response)
 

	
 
        try:
 
            #invalidate cache on push
 
            if action == 'push':
 
                self._invalidate_cache(repo_name)
 
            log.info('%s action on GIT repo "%s"' % (action, repo_name))
 
            app = self.__make_app(repo_name, repo_path)
 
            return app(environ, start_response)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            return HTTPInternalServerError()(environ, start_response)
 

	
 
    def __make_app(self, repo_name, repo_path):
 
        """
 
        Make a wsgi application using dulserver
 

	
 
        :param repo_name: name of the repository
 
        :param repo_path: full path to the repository
 
        """
 
        _d = {'/' + repo_name: Repo(repo_path)}
 
        backend = dulserver.DictBackend(_d)
 
        gitserve = HTTPGitApplication(backend)
 

	
 
        return gitserve
 

	
 
    def __get_repository(self, environ):
 
        """
 
        Gets the repository name out of the PATH_INFO header
 

	
 
        :param environ: environ where PATH_INFO is stored
 
        """
 
        try:
 
            environ['PATH_INFO'] = self._get_by_id(environ['PATH_INFO'])
 
            repo_name = GIT_PROTO_PAT.match(environ['PATH_INFO']).group(1)
 
        except:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
        return repo_name
 

	
 
    def __get_user(self, username):
 
        return User.get_by_username(username)
 

	
 
    def __get_action(self, environ):
 
        """
 
        Maps git request commands into a pull or push command.
 

	
 
        :param environ:
 
        """
 
        service = environ['QUERY_STRING'].split('=')
 

	
 
        if len(service) > 1:
 
            service_cmd = service[1]
 
            mapping = {
 
                'git-receive-pack': 'push',
 
                'git-upload-pack': 'pull',
 
            }
 
            op = mapping[service_cmd]
 
            self._git_stored_op = op
 
            return op
 
        else:
 
            # try to fall back to the stored variable as we don't know if the last
 
            # operation is pull/push
 
            op = getattr(self, '_git_stored_op', 'pull')
 
        return op
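        # Illustrative mapping: a clone/fetch sends QUERY_STRING
        # 'service=git-upload-pack' -> 'pull', while a push sends
        # 'service=git-receive-pack' -> 'push'.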

Changeset was too big and was cut off...
