Changeset - dc960653f8d2
[Not reviewed]
beta
Marcin Kuzminski - 2011-06-29 00:41:51
marcin@python-works.com
Added a new random directory for each test to be better sandboxed
2 files changed with 7 insertions and 2 deletions:
0 comments (0 inline, 0 general)
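
The heart of this changeset is replacing the fixed /tmp test location with a randomly named directory per test run. A minimal sketch of that idea (editor's illustration, not part of the diff), using the same private tempfile._RandomNameSequence helper the change relies on; tempfile.mkdtemp would be the public alternative:

    import os
    from tempfile import _RandomNameSequence  # private CPython helper, as used in the diff

    # a fresh, random sandbox path for this test run, e.g. /tmp/rc_test_<random suffix>
    TESTS_TMP_PATH = os.path.join('/', 'tmp', 'rc_test_%s' % next(_RandomNameSequence()))

    # the directory itself is created later by the test setup (create_test_env)
    if not os.path.isdir(TESTS_TMP_PATH):
        os.makedirs(TESTS_TMP_PATH)
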
rhodecode/lib/utils.py
@@ -133,487 +133,492 @@ def action_logger(user, action, repo, ip
        user_log.action = action

        user_log.repository_id = repo_obj.repo_id
        user_log.repository_name = repo_name

        user_log.action_date = datetime.datetime.now()
        user_log.user_ip = ipaddr
        sa.add(user_log)
        sa.commit()

        log.info('Adding user %s, action %s on %s', user_obj, action, repo)
    except:
        log.error(traceback.format_exc())
        sa.rollback()


def get_repos(path, recursive=False):
    """
    Scans given path for repos and returns (name, (type, path)) tuple

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front
    """
    from vcs.utils.helpers import get_scm
    from vcs.exceptions import VCSError

    if path.endswith(os.sep):
        #remove ending slash for better results
        path = path[:-1]

    def _get_repos(p):
        if not os.access(p, os.W_OK):
            return
        for dirpath in os.listdir(p):
            if os.path.isfile(os.path.join(p, dirpath)):
                continue
            cur_path = os.path.join(p, dirpath)
            try:
                scm_info = get_scm(cur_path)
                yield scm_info[1].split(path)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                #check if this dir contains other repos for recursive scan
                rec_path = os.path.join(p, dirpath)
                if os.path.isdir(rec_path):
                    for inner_scm in _get_repos(rec_path):
                        yield inner_scm

    return _get_repos(path)


def check_repo_fast(repo_name, base_path):
    """
    Check given path for existence of directory
    :param repo_name:
    :param base_path:

    :return False: if this directory is present
    """
    if os.path.isdir(os.path.join(base_path, repo_name)):
        return False
    return True


def check_repo(repo_name, base_path, verify=True):

    repo_path = os.path.join(base_path, repo_name)

    try:
        if not check_repo_fast(repo_name, base_path):
            return False
        r = hg.repository(ui.ui(), repo_path)
        if verify:
            hg.verify(r)
        #here we know that the repo exists and it was verified
        log.info('%s repo is already created', repo_name)
        return False
    except RepoError:
        #it means that there is no valid repo there...
        log.info('%s repo is free for creation', repo_name)
        return True


def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
    while True:
        ok = raw_input(prompt)
        if ok in ('y', 'ye', 'yes'):
            return True
        if ok in ('n', 'no', 'nop', 'nope'):
            return False
        retries = retries - 1
        if retries < 0:
            raise IOError
        print complaint

#propagated from mercurial documentation
ui_sections = ['alias', 'auth',
                'decode/encode', 'defaults',
                'diff', 'email',
                'extensions', 'format',
                'merge-patterns', 'merge-tools',
                'hooks', 'http_proxy',
                'smtp', 'patch',
                'paths', 'profiling',
                'server', 'trusted',
                'ui', 'web', ]


def make_ui(read_from='file', path=None, checkpaths=True):
    """A function that will read python rc files or database
    and make a mercurial ui object from the read options

    :param path: path to mercurial config file
    :param checkpaths: check the path
    :param read_from: read from 'file' or 'db'
    """

    baseui = ui.ui()

    #clean the baseui object
    baseui._ocfg = config.config()
    baseui._ucfg = config.config()
    baseui._tcfg = config.config()

    if read_from == 'file':
        if not os.path.isfile(path):
            log.warning('Unable to read config file %s' % path)
            return False
        log.debug('reading hgrc from %s', path)
        cfg = config.config()
        cfg.read(path)
        for section in ui_sections:
            for k, v in cfg.items(section):
                log.debug('settings ui from file[%s]%s:%s', section, k, v)
                baseui.setconfig(section, k, v)

    elif read_from == 'db':
        sa = meta.Session()
        ret = sa.query(RhodeCodeUi)\
            .options(FromCache("sql_cache_short",
                               "get_hg_ui_settings")).all()

        hg_ui = ret
        for ui_ in hg_ui:
            if ui_.ui_active:
                log.debug('settings ui from db[%s]%s:%s', ui_.ui_section,
                          ui_.ui_key, ui_.ui_value)
                baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)

        meta.Session.remove()
    return baseui


def set_rhodecode_config(config):
    """Updates pylons config with new settings from database

    :param config:
    """
    hgsettings = RhodeCodeSettings.get_app_settings()

    for k, v in hgsettings.items():
        config[k] = v


def invalidate_cache(cache_key, *args):
    """Puts cache invalidation task into db for
    further global cache invalidation
    """

    from rhodecode.model.scm import ScmModel

    if cache_key.startswith('get_repo_cached_'):
        name = cache_key.split('get_repo_cached_')[-1]
        ScmModel().mark_for_invalidation(name)


class EmptyChangeset(BaseChangeset):
    """
    A dummy empty changeset. It's possible to pass a hash when creating
    an EmptyChangeset
    """

    def __init__(self, cs='0' * 40, repo=None):
        self._empty_cs = cs
        self.revision = -1
        self.message = ''
        self.author = ''
        self.date = ''
        self.repository = repo

    @LazyProperty
    def raw_id(self):
        """Returns raw string identifying this changeset, useful for web
        representation.
        """

        return self._empty_cs

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    def get_file_changeset(self, path):
        return self

    def get_file_content(self, path):
        return u''

    def get_file_size(self, path):
        return 0


def map_groups(groups):
    """Checks for group existence, creating the group structures as needed.
    Returns the last group in the structure.

    :param groups: list of groups structure
    """
    sa = meta.Session()

    parent = None
    group = None
    for lvl, group_name in enumerate(groups[:-1]):
        group = sa.query(Group).filter(Group.group_name == group_name).scalar()

        if group is None:
            group = Group(group_name, parent)
            sa.add(group)
            sa.commit()

        parent = group

    return group


def repo2db_mapper(initial_repo_list, remove_obsolete=False):
    """maps all repos given in initial_repo_list, non-existing repositories
    are created, if remove_obsolete is True it also checks for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    """

    sa = meta.Session()
    rm = RepoModel()
    user = sa.query(User).filter(User.admin == True).first()
    added = []
    for name, repo in initial_repo_list.items():
        group = map_groups(name.split('/'))
        if not rm.get_by_repo_name(name, cache=False):
            log.info('repository %s not found creating default', name)
            added.append(name)
            form_data = {
                         'repo_name': name,
                         'repo_name_full': name,
                         'repo_type': repo.alias,
                         'description': repo.description \
                            if repo.description != 'unknown' else \
                                        '%s repository' % name,
                         'private': False,
                         'group_id': getattr(group, 'group_id', None)
                         }
            rm.create(form_data, user, just_db=True)

    removed = []
    if remove_obsolete:
        #remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                removed.append(repo.repo_name)
                sa.delete(repo)
                sa.commit()

    return added, removed

#set cache regions for beaker so celery can utilise it
def add_cache(settings):
    cache_settings = {'regions': None}
    for key in settings.keys():
        for prefix in ['beaker.cache.', 'cache.']:
            if key.startswith(prefix):
                name = key.split(prefix)[1].strip()
                cache_settings[name] = settings[key].strip()
    if cache_settings['regions']:
        for region in cache_settings['regions'].split(','):
            region = region.strip()
            region_settings = {}
            for key, value in cache_settings.items():
                if key.startswith(region):
                    region_settings[key.split('.')[1]] = value
            region_settings['expire'] = int(region_settings.get('expire',
                                                                60))
            region_settings.setdefault('lock_dir',
                                       cache_settings.get('lock_dir'))
            region_settings.setdefault('data_dir',
                                       cache_settings.get('data_dir'))

            if 'type' not in region_settings:
                region_settings['type'] = cache_settings.get('type',
                                                             'memory')
            beaker.cache.cache_regions[region] = region_settings


def get_current_revision():
    """Returns tuple of (number, id) from repository containing this package
    or None if repository could not be found.
    """

    try:
        from vcs import get_repo
        from vcs.utils.helpers import get_scm
        from vcs.exceptions import RepositoryError, VCSError
        repopath = os.path.join(os.path.dirname(__file__), '..', '..')
        scm = get_scm(repopath)[0]
        repo = get_repo(path=repopath, alias=scm)
        tip = repo.get_changeset()
        return (tip.revision, tip.short_id)
    except (ImportError, RepositoryError, VCSError), err:
        logging.debug("Cannot retrieve rhodecode's revision. Original error "
                      "was: %s" % err)
        return None


#==============================================================================
# TEST FUNCTIONS AND CREATORS
#==============================================================================
def create_test_index(repo_location, full_index):
    """Makes default test index
    :param repo_location:
    :param full_index:
    """
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    from rhodecode.lib.pidlock import DaemonLock, LockHeld
    import shutil

    index_location = os.path.join(repo_location, 'index')
    if os.path.exists(index_location):
        shutil.rmtree(index_location)

    try:
        l = DaemonLock(file=jn(dn(index_location), 'make_index.lock'))
        WhooshIndexingDaemon(index_location=index_location,
                             repo_location=repo_location)\
            .run(full_index=full_index)
        l.release()
    except LockHeld:
        pass


def create_test_env(repos_test_path, config):
    """Makes a fresh database and
    installs a test repository into a tmp dir
    """
    from rhodecode.lib.db_manage import DbManage
    from rhodecode.tests import HG_REPO, GIT_REPO, NEW_HG_REPO, NEW_GIT_REPO, \
        HG_FORK, GIT_FORK, TESTS_TMP_PATH
    import tarfile
    import shutil
    from os.path import dirname as dn, join as jn, abspath

    log = logging.getLogger('TestEnvCreator')
    # create logger
    log.setLevel(logging.DEBUG)
    log.propagate = True
    # create console handler and set level to debug
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)

    # create formatter
    formatter = logging.Formatter("%(asctime)s - %(name)s -"
                                  " %(levelname)s - %(message)s")

    # add formatter to ch
    ch.setFormatter(formatter)

    # add ch to logger
    log.addHandler(ch)

    #PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s', dbconf)

    # create test dir if it doesn't exist
    if not os.path.isdir(repos_test_path):
        log.debug('Creating testdir %s' % repos_test_path)
        os.makedirs(repos_test_path)

    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
                        tests=True)
    dbmanage.create_tables(override=True)
    dbmanage.create_settings(dbmanage.config_prompt(repos_test_path))
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()

    #PART TWO make test repo
    log.debug('making test vcs repositories')

    #remove old ones from previous tests
    for r in [HG_REPO, GIT_REPO, NEW_HG_REPO, NEW_GIT_REPO, HG_FORK, GIT_FORK]:

        if os.path.isdir(jn(TESTS_TMP_PATH, r)):
            log.debug('removing %s', r)
            shutil.rmtree(jn(TESTS_TMP_PATH, r))

    idx_path = config['app_conf']['index_dir']
    data_path = config['app_conf']['cache_dir']

    #clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s' % idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s' % data_path)
        shutil.rmtree(data_path)

    #CREATE DEFAULT HG REPOSITORY
    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
    tar.close()


#==============================================================================
# PASTER COMMANDS
#==============================================================================
class BasePasterCommand(Command):
    """
    Abstract Base Class for paster commands.

    The celery commands are somewhat aggressive about loading
    celery.conf, and since our module sets the `CELERY_LOADER`
    environment variable to our loader, we have to bootstrap a bit and
    make sure we've had a chance to load the pylons config off of the
    command line, otherwise everything fails.
    """
    min_args = 1
    min_args_error = "Please provide a paster config file as an argument."
    takes_config_file = 1
    requires_config_file = True

    def notify_msg(self, msg, log=False):
        """Make a notification to user, additionally if logger is passed
        it logs this action using given logger

        :param msg: message that will be printed to user
        :param log: logging instance, to use to additionally log this message

        """
        if log and isinstance(log, logging):
            log(msg)

    def run(self, args):
        """
        Overrides Command.run

        Checks for a config file argument and loads it.
        """
        if len(args) < self.min_args:
            raise BadCommand(
                self.min_args_error % {'min_args': self.min_args,
                                       'actual_args': len(args)})

        # Decrement because we're going to lob off the first argument.
        # @@ This is hacky
        self.min_args -= 1
        self.bootstrap_config(args[0])
        self.update_parser()
        return super(BasePasterCommand, self).run(args[1:])

    def update_parser(self):
        """
        Abstract method.  Allows for the class's parser to be updated
        before the superclass's `run` method is called.  Necessary to
        allow options/arguments to be passed through to the underlying
        celery command.
        """
        raise NotImplementedError("Abstract Method.")

    def bootstrap_config(self, conf):
        """
        Loads the pylons configuration.
        """
        from pylons import config as pylonsconfig

        path_to_ini_file = os.path.realpath(conf)
        conf = paste.deploy.appconfig('config:' + path_to_ini_file)
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)
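
For orientation (editor's usage sketch, not part of the changeset): get_repos() above yields (name, (type, path)) tuples, so scanning a base directory looks roughly like this, with /srv/repos as a hypothetical root:

    from rhodecode.lib.utils import get_repos

    for repo_name, (repo_type, repo_path) in get_repos('/srv/repos', recursive=True):
        print repo_name, repo_type, repo_path
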
rhodecode/tests/__init__.py
"""Pylons application test package
 

	
 
This package assumes the Pylons environment is already loaded, such as
 
when this script is imported from the `nosetests --with-pylons=test.ini`
 
command.
 

	
 
This module initializes the application via ``websetup`` (`paster
 
setup-app`) and provides the base testing objects.
 
"""
 
import os
 
from os.path import join as jn
 

	
 
from unittest import TestCase
 

	
 
from paste.deploy import loadapp
 
from paste.script.appinstall import SetupCommand
 
from pylons import config, url
 
from routes.util import URLGenerator
 
from webtest import TestApp
 

	
 
from rhodecode.model import meta
 
import logging
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 
import pylons.test
 

	
 
__all__ = ['environ', 'url', 'TestController', 'TESTS_TMP_PATH', 'HG_REPO',
 
           'GIT_REPO', 'NEW_HG_REPO', 'NEW_GIT_REPO', 'HG_FORK', 'GIT_FORK', ]
 

	
 
# Invoke websetup with the current config file
 
#SetupCommand('setup-app').run([config_file])
 

	
 
##RUNNING DESIRED TESTS
 
#nosetests -x rhodecode.tests.functional.test_admin_settings:TestSettingsController.test_my_account
 

	
 
environ = {}
 

	
 
#SOME GLOBALS FOR TESTS
 
TESTS_TMP_PATH = jn('/', 'tmp')
 

	
 
from tempfile import _RandomNameSequence
 
TESTS_TMP_PATH = jn('/', 'tmp', 'rc_test_%s' % _RandomNameSequence().next())
 
HG_REPO = 'vcs_test_hg'
 
GIT_REPO = 'vcs_test_git'
 

	
 
NEW_HG_REPO = 'vcs_test_hg_new'
 
NEW_GIT_REPO = 'vcs_test_git_new'
 

	
 
HG_FORK = 'vcs_test_hg_fork'
 
GIT_FORK = 'vcs_test_git_fork'
 

	
 
class TestController(TestCase):

    def __init__(self, *args, **kwargs):
        wsgiapp = pylons.test.pylonsapp
        config = wsgiapp.config

        self.app = TestApp(wsgiapp)
        url._push_object(URLGenerator(config['routes.map'], environ))
        self.sa = meta.Session
        self.index_location = config['app_conf']['index_dir']
        TestCase.__init__(self, *args, **kwargs)

    def log_user(self, username='test_admin', password='test12'):
        response = self.app.post(url(controller='login', action='index'),
                                 {'username':username,
                                  'password':password})

        if 'invalid user name' in response.body:
            self.fail('could not login using %s %s' % (username, password))

        self.assertEqual(response.status, '302 Found')
        self.assertEqual(response.session['rhodecode_user'].username, username)
        return response.follow()

    def checkSessionFlash(self, response, msg):
        self.assertTrue('flash' in response.session)
        self.assertTrue(msg in response.session['flash'][0][1])
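
For context (editor's sketch, not part of this changeset): functional tests typically subclass TestController and start from log_user(), which already asserts the redirect and the session user, so a minimal hypothetical test reads:

    class TestLoginSmoke(TestController):
        """Hypothetical example built on the default test_admin account."""

        def test_admin_can_log_in(self):
            # log_user() fails the test itself if the login is rejected
            self.log_user(username='test_admin', password='test12')
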
 

	
0 comments (0 inline, 0 general)