Changeset - 2e9f2bd28143
[Not reviewed]
beta
Marcin Kuzminski - 2011-02-12 13:56:28
marcin@python-works.com
fixed add cache defaults missing data_dir
1 file changed with 3 insertions and 0 deletions:
0 comments (0 inline, 0 general)
rhodecode/lib/utils.py
@@ -317,366 +317,369 @@ class EmptyChangeset(BaseChangeset):
        return self._empty_cs

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    def get_file_changeset(self, path):
        return self

    def get_file_content(self, path):
        return u''

    def get_file_size(self, path):
        return 0

def map_groups(groups):
    """Checks for groups existence, and creates groups structures.
    It returns last group in structure

    :param groups: list of groups structure
    """
    sa = meta.Session()

    parent = None
    group = None
    for lvl, group_name in enumerate(groups[:-1]):
        group = sa.query(Group).filter(Group.group_name == group_name).scalar()

        if group is None:
            group = Group(group_name, parent)
            sa.add(group)
            sa.commit()

        parent = group

    return group

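# (Illustrative note, not part of rhodecode/lib/utils.py.)  map_groups expects
# the repository name split into path segments; everything before the last
# segment becomes a chain of nested Group rows, and the innermost group (or
# None for an ungrouped repository) is returned, e.g.:
#
#   map_groups('projects/web/my-repo'.split('/'))   # hypothetical repo name
#   # -> ensures Group('projects') and Group('web', parent=projects) exist,
#   #    returns the 'web' group; map_groups(['my-repo']) returns None
#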
def repo2db_mapper(initial_repo_list, remove_obsolete=False):
    """maps all repos given in initial_repo_list, non existing repositories
    are created, if remove_obsolete is True it also check for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    """

    sa = meta.Session()
    rm = RepoModel()
    user = sa.query(User).filter(User.admin == True).first()

    for name, repo in initial_repo_list.items():
        group = map_groups(name.split('/'))
        if not rm.get_by_repo_name(name, cache=False):
            log.info('repository %s not found creating default', name)

            form_data = {
                         'repo_name':name,
                         'repo_type':repo.alias,
                         'description':repo.description \
                            if repo.description != 'unknown' else \
                                        '%s repository' % name,
                         'private':False,
                         'group_id':getattr(group, 'group_id', None)
                         }
            rm.create(form_data, user, just_db=True)

    if remove_obsolete:
        #remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                sa.delete(repo)
                sa.commit()

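# (Illustrative sketch with hypothetical values.)  initial_repo_list maps
# repository names -- possibly 'group/subgroup/name' paths -- to scanned repo
# objects exposing .alias and .description; unknown names are inserted into
# the database for the first admin user, and remove_obsolete=True deletes
# database rows whose name is no longer present in the scan, e.g.:
#
#   scanned = {'projects/web-app': hg_repo, 'standalone': git_repo}
#   repo2db_mapper(scanned, remove_obsolete=True)
#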
class OrderedDict(dict, DictMixin):

    def __init__(self, *args, **kwds):
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            self.__end
        except AttributeError:
            self.clear()
        self.update(*args, **kwds)

    def clear(self):
        self.__end = end = []
        end += [None, end, end]         # sentinel node for doubly linked list
        self.__map = {}                 # key --> [key, prev, next]
        dict.clear(self)

    def __setitem__(self, key, value):
        if key not in self:
            end = self.__end
            curr = end[1]
            curr[2] = end[1] = self.__map[key] = [key, curr, end]
        dict.__setitem__(self, key, value)

    def __delitem__(self, key):
        dict.__delitem__(self, key)
        key, prev, next = self.__map.pop(key)
        prev[2] = next
        next[1] = prev

    def __iter__(self):
        end = self.__end
        curr = end[2]
        while curr is not end:
            yield curr[0]
            curr = curr[2]

    def __reversed__(self):
        end = self.__end
        curr = end[1]
        while curr is not end:
            yield curr[0]
            curr = curr[1]

    def popitem(self, last=True):
        if not self:
            raise KeyError('dictionary is empty')
        if last:
            key = reversed(self).next()
        else:
            key = iter(self).next()
        value = self.pop(key)
        return key, value

    def __reduce__(self):
        items = [[k, self[k]] for k in self]
        tmp = self.__map, self.__end
        del self.__map, self.__end
        inst_dict = vars(self).copy()
        self.__map, self.__end = tmp
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def keys(self):
        return list(self)

    setdefault = DictMixin.setdefault
    update = DictMixin.update
    pop = DictMixin.pop
    values = DictMixin.values
    items = DictMixin.items
    iterkeys = DictMixin.iterkeys
    itervalues = DictMixin.itervalues
    iteritems = DictMixin.iteritems

    def __repr__(self):
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, self.items())

    def copy(self):
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        d = cls()
        for key in iterable:
            d[key] = value
        return d

    def __eq__(self, other):
        if isinstance(other, OrderedDict):
            return len(self) == len(other) and self.items() == other.items()
        return dict.__eq__(self, other)

    def __ne__(self, other):
        return not self == other


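# (Illustrative usage.)  The recipe above keeps insertion order in the
# __map/__end doubly linked list, so iteration, keys() and popitem() follow
# the order keys were added in (Python 2 era, hence .next() in popitem):
#
#   d = OrderedDict()
#   d['b'] = 1
#   d['a'] = 2
#   d.keys()               # ['b', 'a'] -- insertion order, not sorted
#   d.popitem()            # ('a', 2)   -- last inserted pair
#   d.popitem(last=False)  # ('b', 1)   -- oldest pair
#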
#set cache regions for beaker so celery can utilise it
def add_cache(settings):
    cache_settings = {'regions':None}
    for key in settings.keys():
        for prefix in ['beaker.cache.', 'cache.']:
            if key.startswith(prefix):
                name = key.split(prefix)[1].strip()
                cache_settings[name] = settings[key].strip()
    if cache_settings['regions']:
        for region in cache_settings['regions'].split(','):
            region = region.strip()
            region_settings = {}
            for key, value in cache_settings.items():
                if key.startswith(region):
                    region_settings[key.split('.')[1]] = value
            region_settings['expire'] = int(region_settings.get('expire',
                                                                60))
            region_settings.setdefault('lock_dir',
                                       cache_settings.get('lock_dir'))
            region_settings.setdefault('data_dir',
                                       cache_settings.get('data_dir'))

            if 'type' not in region_settings:
                region_settings['type'] = cache_settings.get('type',
                                                             'memory')
            beaker.cache.cache_regions[region] = region_settings

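# (Illustrative sketch with hypothetical .ini values.)  add_cache() strips the
# 'beaker.cache.' / 'cache.' prefixes and registers one entry per region in
# beaker.cache.cache_regions; with the setdefault calls above, a region that
# does not define its own lock_dir or data_dir now inherits the global ones --
# the missing data_dir fallback is what this changeset adds:
#
#   settings = {
#       'beaker.cache.regions': 'super_short_term, long_term',
#       'beaker.cache.data_dir': '/tmp/rc/cache/data',   # hypothetical paths
#       'beaker.cache.lock_dir': '/tmp/rc/cache/lock',
#       'beaker.cache.super_short_term.type': 'memory',
#       'beaker.cache.super_short_term.expire': '10',
#       'beaker.cache.long_term.type': 'file',
#       'beaker.cache.long_term.expire': '36000',
#   }
#   add_cache(settings)
#   # cache_regions['long_term'] now carries data_dir/lock_dir from the global
#   # beaker.cache.* keys instead of lacking them entirely
#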
def get_current_revision():
    """Returns tuple of (number, id) from repository containing this package
    or None if repository could not be found.
    """

    try:
        from vcs import get_repo
        from vcs.utils.helpers import get_scm
        from vcs.exceptions import RepositoryError, VCSError
        repopath = os.path.join(os.path.dirname(__file__), '..', '..')
        scm = get_scm(repopath)[0]
        repo = get_repo(path=repopath, alias=scm)
        tip = repo.get_changeset()
        return (tip.revision, tip.short_id)
    except (ImportError, RepositoryError, VCSError), err:
        logging.debug("Cannot retrieve rhodecode's revision. Original error "
                      "was: %s" % err)
        return None

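# (Illustrative usage.)  Callers receive either a (revision, short_id) tuple
# or None and should handle both, e.g.:
#
#   rev = get_current_revision()
#   version_info = 'rev %s:%s' % rev if rev else 'unknown revision'
#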
#===============================================================================
# TEST FUNCTIONS AND CREATORS
#===============================================================================
def create_test_index(repo_location, full_index):
    """Makes default test index
    :param repo_location:
    :param full_index:
    """
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    from rhodecode.lib.pidlock import DaemonLock, LockHeld
    import shutil

    index_location = os.path.join(repo_location, 'index')
    if os.path.exists(index_location):
        shutil.rmtree(index_location)

    try:
        l = DaemonLock()
        WhooshIndexingDaemon(index_location=index_location,
                             repo_location=repo_location)\
            .run(full_index=full_index)
        l.release()
    except LockHeld:
        pass

def create_test_env(repos_test_path, config):
    """Makes a fresh database and
    install test repository into tmp dir
    """
    from rhodecode.lib.db_manage import DbManage
    from rhodecode.tests import HG_REPO, GIT_REPO, NEW_HG_REPO, NEW_GIT_REPO, \
        HG_FORK, GIT_FORK, TESTS_TMP_PATH
    import tarfile
    import shutil
    from os.path import dirname as dn, join as jn, abspath

    log = logging.getLogger('TestEnvCreator')
    # create logger
    log.setLevel(logging.DEBUG)
    log.propagate = True
    # create console handler and set level to debug
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)

    # create formatter
    formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")

    # add formatter to ch
    ch.setFormatter(formatter)

    # add ch to logger
    log.addHandler(ch)

    #PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s', dbconf)

    dbmanage = DbManage(log_sql=True, dbconf=dbconf, root=config['here'],
                        tests=True)
    dbmanage.create_tables(override=True)
    dbmanage.config_prompt(repos_test_path)
    dbmanage.create_default_user()
    dbmanage.admin_prompt()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()

    #PART TWO make test repo
    log.debug('making test vcs repositories')

    #remove old one from previos tests
    for r in [HG_REPO, GIT_REPO, NEW_HG_REPO, NEW_GIT_REPO, HG_FORK, GIT_FORK]:

        if os.path.isdir(jn(TESTS_TMP_PATH, r)):
            log.debug('removing %s', r)
            shutil.rmtree(jn(TESTS_TMP_PATH, r))

    #CREATE DEFAULT HG REPOSITORY
    cur_dir = dn(dn(abspath(__file__)))
    tar = tarfile.open(jn(cur_dir, 'tests', "vcs_test_hg.tar.gz"))
    tar.extractall(jn(TESTS_TMP_PATH, HG_REPO))
    tar.close()


#==============================================================================
# PASTER COMMANDS
#==============================================================================

class BasePasterCommand(Command):
    """
    Abstract Base Class for paster commands.

    The celery commands are somewhat aggressive about loading
    celery.conf, and since our module sets the `CELERY_LOADER`
    environment variable to our loader, we have to bootstrap a bit and
    make sure we've had a chance to load the pylons config off of the
    command line, otherwise everything fails.
    """
    min_args = 1
    min_args_error = "Please provide a paster config file as an argument."
    takes_config_file = 1
    requires_config_file = True

    def notify_msg(self, msg, log=False):
        """Make a notification to user, additionally if logger is passed
        it logs this action using given logger

        :param msg: message that will be printed to user
        :param log: logging instance, to use to additionally log this message

        """
        if log and isinstance(log, logging):
            log(msg)


    def run(self, args):
        """
        Overrides Command.run

        Checks for a config file argument and loads it.
        """
        if len(args) < self.min_args:
            raise BadCommand(
                self.min_args_error % {'min_args': self.min_args,
                                       'actual_args': len(args)})

        # Decrement because we're going to lob off the first argument.
        # @@ This is hacky
        self.min_args -= 1
        self.bootstrap_config(args[0])
        self.update_parser()
        return super(BasePasterCommand, self).run(args[1:])

    def update_parser(self):
        """
        Abstract method.  Allows for the class's parser to be updated
        before the superclass's `run` method is called.  Necessary to
        allow options/arguments to be passed through to the underlying
        celery command.
        """
        raise NotImplementedError("Abstract Method.")

    def bootstrap_config(self, conf):
        """
        Loads the pylons configuration.
        """
        from pylons import config as pylonsconfig

        path_to_ini_file = os.path.realpath(conf)
        conf = paste.deploy.appconfig('config:' + path_to_ini_file)
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)
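
# (Illustrative sketch, hypothetical subclass -- not part of this changeset.)
# The docstring above describes the contract: a concrete paster command gets
# the .ini path as its first argument, BasePasterCommand.run() bootstraps the
# pylons config from it, and update_parser() must be overridden to register
# extra options before the underlying paste.script Command.run() executes:
#
#   class MakeIndexCommand(BasePasterCommand):
#       """Hypothetical `paster make-index production.ini` command."""
#       summary = "rebuild the full text search index"
#       group_name = "RhodeCode"
#       parser = Command.standard_parser(verbose=True)
#
#       def update_parser(self):
#           self.parser.add_option('--full', action='store_true',
#                                  dest='full_index', default=False,
#                                  help='rebuild the index from scratch')
#
#       def command(self):
#           # runs only after bootstrap_config() has loaded the .ini settings
#           do_the_indexing(full=self.options.full_index)  # hypothetical helper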