Changeset - 34093903b505
[Not reviewed]
Branch: beta
Marcin Kuzminski <marcin@python-works.com> - 2013-04-05 23:16:21
repo rescan should detach forks of zombie repos;
this way we ensure proper cleanup of such repos.
2 files changed with 7 insertions and 3 deletions:
0 comments (0 inline, 0 general)
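In effect, the rescan's cleanup of repositories that exist in the database but
no longer on disk ("zombie" repos) now goes through RepoModel.delete() with
forks detached, instead of deleting the database row directly. A minimal
sketch of the new cleanup call as it appears in utils.py below (sa and removed
come from repo2db_mapper):

    # detach any forks of the zombie repo, and leave the filesystem untouched
    RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
    sa.commit()
    removed.append(repo.repo_name)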
rhodecode/lib/utils.py
 
@@ -374,195 +374,195 @@ def map_groups(path):
    rgm = ReposGroupModel(sa)
    for lvl, group_name in enumerate(groups):
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s' % (lvl,
                                                                   group_name))
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            sa.add(group)
            rgm._create_default_perms(group)
            sa.flush()
        parent = group
    return group

 
def repo2db_mapper(initial_repo_list, remove_obsolete=False,
                   install_git_hook=False):
    """
    Maps all repos given in initial_repo_list; non-existing repositories
    are created. If remove_obsolete is True it also checks for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    :param install_git_hook: if this is True, also check and install git hook
        for a repo if missing
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.scm import ScmModel
    sa = meta.Session()
    rm = RepoModel()
    user = sa.query(User).filter(User.admin == True).first()
    if user is None:
        raise Exception('Missing administrative account!')
    added = []

    ##creation defaults
    defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in initial_repo_list.items():
        group = map_groups(name)
        db_repo = rm.get_by_repo_name(name)
        # found repo that is on filesystem not in RhodeCode database
        if not db_repo:
            log.info('repository %s not found, creating now' % name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)

            new_repo = rm.create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repos_group=getattr(group, 'group_id', None),
                owner=user,
                just_db=True,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private
            )
            # we just added that repo, make sure it has the git hook installed
            if new_repo.repo_type == 'git':
                ScmModel().install_git_hook(new_repo.scm_instance)
            new_repo.update_changeset_cache()
        elif install_git_hook:
            if db_repo.repo_type == 'git':
                ScmModel().install_git_hook(db_repo.scm_instance)
        # during startup install cache keys for all repositories in the
        # system; this will register all repos and multiple instances
        cache_key = CacheInvalidation._get_cache_key(name)
        log.debug("Creating invalidation cache key for %s: %s", name, cache_key)
        CacheInvalidation.invalidate(name)

    sa.commit()
    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                log.debug("Removing non-existing repository found in db `%s`" %
                          repo.repo_name)
                try:
                    RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
                    sa.commit()
                    removed.append(repo.repo_name)
                except Exception:
                    #don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()
    return added, removed


# set cache regions for beaker so celery can utilise it
def add_cache(settings):
    cache_settings = {'regions': None}
    for key in settings.keys():
        for prefix in ['beaker.cache.', 'cache.']:
            if key.startswith(prefix):
                name = key.split(prefix)[1].strip()
                cache_settings[name] = settings[key].strip()
    if cache_settings['regions']:
        for region in cache_settings['regions'].split(','):
            region = region.strip()
            region_settings = {}
            for key, value in cache_settings.items():
                if key.startswith(region):
                    region_settings[key.split('.')[1]] = value
            region_settings['expire'] = int(region_settings.get('expire',
                                                                60))
            region_settings.setdefault('lock_dir',
                                       cache_settings.get('lock_dir'))
            region_settings.setdefault('data_dir',
                                       cache_settings.get('data_dir'))

            if 'type' not in region_settings:
                region_settings['type'] = cache_settings.get('type',
                                                             'memory')
            beaker.cache.cache_regions[region] = region_settings


def load_rcextensions(root_path):
    import rhodecode
    from rhodecode.config import conf

    path = os.path.join(root_path, 'rcextensions', '__init__.py')
    if os.path.isfile(path):
        rcext = create_module('rc', path)
        EXT = rhodecode.EXTENSIONS = rcext
        log.debug('Found rcextensions now loading %s...' % rcext)

        # Additional mappings that are not present in the pygments lexers
        conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))

        #OVERRIDE OUR EXTENSIONS FROM RC-EXTENSIONS (if present)

        if getattr(EXT, 'INDEX_EXTENSIONS', []) != []:
            log.debug('settings custom INDEX_EXTENSIONS')
            conf.INDEX_EXTENSIONS = getattr(EXT, 'INDEX_EXTENSIONS', [])

        #ADDITIONAL MAPPINGS
        log.debug('adding extra into INDEX_EXTENSIONS')
        conf.INDEX_EXTENSIONS.extend(getattr(EXT, 'EXTRA_INDEX_EXTENSIONS', []))

        # auto check if the module is not missing any data, set to default if is
        # this will help autoupdate new feature of rcext module
        from rhodecode.config import rcextensions
        for k in dir(rcextensions):
            if not k.startswith('_') and not hasattr(EXT, k):
                setattr(EXT, k, getattr(rcextensions, k))


def get_custom_lexer(extension):
    """
    returns a custom lexer if it's defined in rcextensions module, or None
    if there's no custom lexer defined
    """
    import rhodecode
    from pygments import lexers
    #check if we didn't define this extension as other lexer
    if rhodecode.EXTENSIONS and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
        _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
        return lexers.get_lexer_by_name(_lexer_name)


#==============================================================================
# TEST FUNCTIONS AND CREATORS
#==============================================================================
def create_test_index(repo_location, config, full_index):
    """
    Makes default test index

    :param config: test config
    :param full_index:
    """

    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon
    from rhodecode.lib.pidlock import DaemonLock, LockHeld

    repo_location = repo_location

    index_location = os.path.join(config['app_conf']['index_dir'])
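For context, a minimal sketch of how a rescan would typically drive
repo2db_mapper; the exact call site (the admin rescan view) is not part of
this changeset, so treat the entry point below as an assumption:

    # hypothetical rescan driver: scan the filesystem, then sync the database;
    # with remove_obsolete=True, zombie repos are removed via RepoModel.delete()
    from rhodecode.lib.utils import repo2db_mapper
    from rhodecode.model.scm import ScmModel

    filesystem_repos = ScmModel().repo_scan()  # {repo_name: scm_instance}
    added, removed = repo2db_mapper(filesystem_repos, remove_obsolete=True)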
rhodecode/model/repo.py
 
@@ -373,218 +373,222 @@ class RepoModel(BaseModel):
                # create default permission
                repo_to_perm = UserRepoToPerm()
                default = 'repository.read'
                for p in User.get_by_username('default').user_perms:
                    if p.permission.permission_name.startswith('repository.'):
                        default = p.permission.permission_name
                        break

                default_perm = 'repository.none' if private else default

                repo_to_perm.permission_id = self.sa.query(Permission)\
                        .filter(Permission.permission_name == default_perm)\
                        .one().permission_id

                repo_to_perm.repository = new_repo
                repo_to_perm.user_id = User.get_by_username('default').user_id

                self.sa.add(repo_to_perm)

            if fork_of:
                if copy_fork_permissions:
                    repo = fork_of
                    user_perms = UserRepoToPerm.query()\
                        .filter(UserRepoToPerm.repository == repo).all()
                    group_perms = UserGroupRepoToPerm.query()\
                        .filter(UserGroupRepoToPerm.repository == repo).all()

                    for perm in user_perms:
                        UserRepoToPerm.create(perm.user, new_repo,
                                              perm.permission)

                    for perm in group_perms:
                        UserGroupRepoToPerm.create(perm.users_group, new_repo,
                                                   perm.permission)
                else:
                    _create_default_perms()
            else:
                _create_default_perms()

            if not just_db:
                self.__create_repo(repo_name, repo_type,
                                   repos_group,
                                   clone_uri)
                log_create_repository(new_repo.get_dict(),
                                      created_by=owner.username)

            # now automatically start following this repository as owner
            ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
                                                    owner.user_id)
            return new_repo
        except Exception:
            log.error(traceback.format_exc())
            raise

    def create(self, form_data, cur_user, just_db=False, fork=None):
        """
        Backward compatibility function, just a wrapper on top of create_repo

        :param form_data:
        :param cur_user:
        :param just_db:
        :param fork:
        """
        owner = cur_user
        repo_name = form_data['repo_name_full']
        repo_type = form_data['repo_type']
        description = form_data['repo_description']
        private = form_data['repo_private']
        clone_uri = form_data.get('clone_uri')
        repos_group = form_data['repo_group']
        landing_rev = form_data['repo_landing_rev']
        copy_fork_permissions = form_data.get('copy_permissions')
        fork_of = form_data.get('fork_parent_id')

        ## repo creation defaults, private and repo_type are filled in form
        defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
        enable_statistics = defs.get('repo_enable_statistics')
        enable_locking = defs.get('repo_enable_locking')
        enable_downloads = defs.get('repo_enable_downloads')

        return self.create_repo(
            repo_name, repo_type, description, owner, private, clone_uri,
            repos_group, landing_rev, just_db, fork_of, copy_fork_permissions,
            enable_statistics, enable_locking, enable_downloads
        )

    def create_fork(self, form_data, cur_user):
        """
        Simple wrapper into executing celery task for fork creation

        :param form_data:
        :param cur_user:
        """
        from rhodecode.lib.celerylib import tasks, run_task
        run_task(tasks.create_repo_fork, form_data, cur_user)

    def delete(self, repo, forks=None, fs_remove=True):
        """
        Delete given repository; the forks parameter defines what to do with
        attached forks. Throws AttachedForksError if the deleted repo has
        attached forks.

        :param repo:
        :param forks: str 'delete' or 'detach'
        :param fs_remove: remove (archive) repo from filesystem
        """
        repo = self._get_repo(repo)
        if repo:
            if forks == 'detach':
                for r in repo.forks:
                    r.fork = None
                    self.sa.add(r)
            elif forks == 'delete':
                for r in repo.forks:
                    self.delete(r, forks='delete')
            elif [f for f in repo.forks]:
                raise AttachedForksError()

            old_repo_dict = repo.get_dict()
            owner = repo.user
            try:
                self.sa.delete(repo)
                if fs_remove:
                    self.__delete_repo(repo)
                else:
                    log.debug('skipping removal from filesystem')
                log_delete_repository(old_repo_dict,
                                      deleted_by=owner.username)
            except Exception:
                log.error(traceback.format_exc())
                raise

    def grant_user_permission(self, repo, user, perm):
        """
        Grant permission for user on given repository, or update existing one
        if found

        :param repo: Instance of Repository, repository_id, or repository name
        :param user: Instance of User, user_id or username
        :param perm: Instance of Permission, or permission_name
        """
        user = self._get_user(user)
        repo = self._get_repo(repo)
        permission = self._get_perm(perm)

        # check if we have that permission already
        obj = self.sa.query(UserRepoToPerm)\
            .filter(UserRepoToPerm.user == user)\
            .filter(UserRepoToPerm.repository == repo)\
            .scalar()
        if obj is None:
            # create new !
            obj = UserRepoToPerm()
        obj.repository = repo
        obj.user = user
        obj.permission = permission
        self.sa.add(obj)
        log.debug('Granted perm %s to %s on %s' % (perm, user, repo))

    def revoke_user_permission(self, repo, user):
        """
        Revoke permission for user on given repository

        :param repo: Instance of Repository, repository_id, or repository name
        :param user: Instance of User, user_id or username
        """

        user = self._get_user(user)
        repo = self._get_repo(repo)

        obj = self.sa.query(UserRepoToPerm)\
            .filter(UserRepoToPerm.repository == repo)\
            .filter(UserRepoToPerm.user == user)\
            .scalar()
        if obj:
            self.sa.delete(obj)
            log.debug('Revoked perm on %s on %s' % (repo, user))

    def grant_users_group_permission(self, repo, group_name, perm):
        """
        Grant permission for user group on given repository, or update
        existing one if found

        :param repo: Instance of Repository, repository_id, or repository name
        :param group_name: Instance of UserGroup, users_group_id,
            or user group name
        :param perm: Instance of Permission, or permission_name
        """
        repo = self._get_repo(repo)
        group_name = self.__get_users_group(group_name)
        permission = self._get_perm(perm)

        # check if we have that permission already
        obj = self.sa.query(UserGroupRepoToPerm)\
            .filter(UserGroupRepoToPerm.users_group == group_name)\
            .filter(UserGroupRepoToPerm.repository == repo)\
            .scalar()

        if obj is None:
            # create new
            obj = UserGroupRepoToPerm()

        obj.repository = repo
        obj.users_group = group_name
        obj.permission = permission
        self.sa.add(obj)
        log.debug('Granted perm %s to %s on %s' % (perm, group_name, repo))

    def revoke_users_group_permission(self, repo, group_name):
        """
        Revoke permission for user group on given repository

        :param repo: Instance of Repository, repository_id, or repository name
        :param group_name: Instance of UserGroup, users_group_id,
            or user group name
        """
        repo = self._get_repo(repo)
        group_name = self.__get_users_group(group_name)

        obj = self.sa.query(UserGroupRepoToPerm)\
            .filter(UserGroupRepoToPerm.repository == repo)\
            .filter(UserGroupRepoToPerm.users_group == group_name)\
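A short usage sketch of the extended delete() API introduced in this
changeset; the repository name is hypothetical, and, as in repo2db_mapper
above, committing the session is left to the caller:

    from rhodecode.model import meta
    from rhodecode.model.repo import RepoModel

    rm = RepoModel()
    repo = rm.get_by_repo_name('some/zombie-repo')  # hypothetical name
    if repo:
        # detach forks and keep the on-disk repository (no archival/removal)
        rm.delete(repo, forks='detach', fs_remove=False)
        meta.Session().commit()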