Changeset - 60335b702a00
[Not reviewed]
beta
Mads Kiilerich - 2013-04-03 15:56:12
madski@unity3d.com
Grafted from: a941f32fc5a6
invalidation: don't create CacheInvalidation records on startup

Creating the records early gave an advantage before lightweight was introduced.
With lightweight it is no longer necessary.

The records will be created on demand anyway and there is no reason to create and
maintain them before they are used.
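
For context, "created on demand" here is the usual get-or-create pattern: the first caller that checks a cache key inserts the row if it is missing, so nothing has to be pre-created when the server starts. The sketch below only illustrates that idea; `CacheRecord`, the in-memory SQLite session, and the simplified `test_and_set_valid` semantics are hypothetical stand-ins, not RhodeCode's actual `CacheInvalidation` model.

# Illustrative sketch only -- a generic get-or-create cache record, not
# RhodeCode's CacheInvalidation implementation.
from sqlalchemy import Boolean, Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class CacheRecord(Base):  # hypothetical stand-in for CacheInvalidation
    __tablename__ = 'cache_record'
    id = Column(Integer, primary_key=True)
    cache_key = Column(String(255), unique=True, nullable=False)
    valid = Column(Boolean, default=True, nullable=False)

def test_and_set_valid(session, key):
    """Return True if the cache behind `key` was already valid; otherwise
    mark it valid now and let the caller rebuild the cached data."""
    record = session.query(CacheRecord).filter_by(cache_key=key).first()
    if record is None:
        # First use of this key: create the record on demand instead of
        # pre-creating it at startup.
        record = CacheRecord(cache_key=key, valid=False)
        session.add(record)
    was_valid = record.valid
    record.valid = True
    session.commit()
    return was_valid

if __name__ == '__main__':
    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()
    print(test_and_set_valid(session, 'repo1/README'))  # False -> rebuild cache
    print(test_and_set_valid(session, 'repo1/README'))  # True  -> cache still valid

Under that pattern a startup scan never needs to touch the table: the first consumer of a repository's key (for example the summary page) creates its record, which is what the block removed from rhodecode/lib/utils.py used to do eagerly for every repository.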
3 files changed with 4 insertions and 6 deletions:
0 comments (0 inline, 0 general)
rhodecode/controllers/summary.py
 
@@ -81,97 +81,97 @@ class SummaryController(BaseRepoControll
 
            branches_group[0].append((chs, name),)
        download_l.append(branches_group)

        for name, chs in c.rhodecode_repo.tags.items():
            #chs = chs.split(':')[-1]
            tags_group[0].append((chs, name),)
        download_l.append(tags_group)

        return download_l

    def __get_readme_data(self, db_repo):
        repo_name = db_repo.repo_name

        @cache_region('long_term')
        def _get_readme_from_cache(key, kind):
            readme_data = None
            readme_file = None
            log.debug('Looking for README file')
            try:
                # get's the landing revision! or tip if fails
                cs = db_repo.get_landing_changeset()
                if isinstance(cs, EmptyChangeset):
                    raise EmptyRepositoryError()
                renderer = MarkupRenderer()
                for f in README_FILES:
                    try:
                        readme = cs.get_node(f)
                        if not isinstance(readme, FileNode):
                            continue
                        readme_file = f
                        log.debug('Found README file `%s` rendering...' %
                                  readme_file)
                        readme_data = renderer.render(readme.content, f)
                        break
                    except NodeDoesNotExistError:
                        continue
            except ChangesetError:
                log.error(traceback.format_exc())
                pass
            except EmptyRepositoryError:
                pass
            except Exception:
                log.error(traceback.format_exc())

            return readme_data, readme_file

        kind = 'README'
        valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
        if not valid:
            region_invalidate(_get_readme_from_cache, None, repo_name, kind)
        return _get_readme_from_cache(repo_name, kind)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                   'repository.admin')
    def index(self, repo_name):
        c.dbrepo = dbrepo = c.rhodecode_db_repo
        _load_changelog_summary()
        if self.rhodecode_user.username == 'default':
            # for default(anonymous) user we don't need to pass credentials
            username = ''
            password = ''
        else:
            username = str(self.rhodecode_user.username)
            password = '@'

        parsed_url = urlparse(url.current(qualified=True))

        default_clone_uri = '{scheme}://{user}{pass}{netloc}{path}'

        uri_tmpl = config.get('clone_uri', default_clone_uri)
        uri_tmpl = uri_tmpl.replace('{', '%(').replace('}', ')s')
        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
        uri_dict = {
           'user': urllib.quote(username),
           'pass': password,
           'scheme': parsed_url.scheme,
           'netloc': parsed_url.netloc,
           'path': urllib.quote(safe_str(decoded_path))
        }

        uri = (uri_tmpl % uri_dict)
        # generate another clone url by id
        uri_dict.update(
         {'path': decoded_path.replace(repo_name, '_%s' % c.dbrepo.repo_id)}
        )
        uri_id = uri_tmpl % uri_dict

        c.clone_repo_url = uri
        c.clone_repo_url_id = uri_id

        td = date.today() + timedelta(days=1)
        td_1m = td - timedelta(days=calendar.mdays[td.month])
        td_1y = td - timedelta(days=365)

        ts_min_m = mktime(td_1m.timetuple())
        ts_min_y = mktime(td_1y.timetuple())
rhodecode/lib/utils.py
 
@@ -427,101 +427,96 @@ def repo2db_mapper(initial_repo_list, re
 
    :param install_git_hook: if this is True, also check and install githook
        for a repo if missing
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.scm import ScmModel
    sa = meta.Session()
    rm = RepoModel()
    user = User.get_first_admin()
    added = []

    ##creation defaults
    defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in initial_repo_list.items():
        group = map_groups(name)
        db_repo = rm.get_by_repo_name(name)
        # found repo that is on filesystem not in RhodeCode database
        if not db_repo:
            log.info('repository %s not found, creating now' % name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)

            new_repo = rm.create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repos_group=getattr(group, 'group_id', None),
                owner=user,
                just_db=True,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private
            )
            # we added that repo just now, and make sure it has githook
            # installed
            if new_repo.repo_type == 'git':
                ScmModel().install_git_hook(new_repo.scm_instance)
            new_repo.update_changeset_cache()
        elif install_git_hook:
            if db_repo.repo_type == 'git':
                ScmModel().install_git_hook(db_repo.scm_instance)
        # during starting install all cache keys for all repositories in the
        # system, this will register all repos and multiple instances
        cache_key = CacheInvalidation._get_cache_key(name)
        log.debug("Creating invalidation cache key for %s: %s", name, cache_key)
        CacheInvalidation.test_and_set_valid(name, None)

    sa.commit()
    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                log.debug("Removing non-existing repository found in db `%s`" %
                          repo.repo_name)
                try:
                    removed.append(repo.repo_name)
                    RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
                    sa.commit()
                except Exception:
                    #don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()
    return added, removed


# set cache regions for beaker so celery can utilise it
def add_cache(settings):
    cache_settings = {'regions': None}
    for key in settings.keys():
        for prefix in ['beaker.cache.', 'cache.']:
            if key.startswith(prefix):
                name = key.split(prefix)[1].strip()
                cache_settings[name] = settings[key].strip()
    if cache_settings['regions']:
        for region in cache_settings['regions'].split(','):
            region = region.strip()
            region_settings = {}
            for key, value in cache_settings.items():
                if key.startswith(region):
                    region_settings[key.split('.')[1]] = value
            region_settings['expire'] = int(region_settings.get('expire',
                                                                60))
            region_settings.setdefault('lock_dir',
                                       cache_settings.get('lock_dir'))
            region_settings.setdefault('data_dir',
                                       cache_settings.get('data_dir'))

            if 'type' not in region_settings:
                region_settings['type'] = cache_settings.get('type',
                                                             'memory')
            beaker.cache.cache_regions[region] = region_settings
rhodecode/tests/api/api_base.py
 
@@ -219,96 +219,99 @@ class BaseTestApi(object):

        expected = 'userid is not the same as your user'
        self._compare_error(id_, expected, given=response.body)

    def test_api_pull(self):
        #TODO: issues with rhodecode_extras here.. not sure why !
        pass

#        repo_name = 'test_pull'
#        r = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
#        r.clone_uri = TEST_self.REPO
#        Session.add(r)
#        Session.commit()
#
#        id_, params = _build_data(self.apikey, 'pull',
#                                  repoid=repo_name,)
#        response = self.app.post(API_URL, content_type='application/json',
#                                 params=params)
#
#        expected = 'Pulled from `%s`' % repo_name
#        self._compare_ok(id_, expected, given=response.body)
#
#        fixture.destroy_repo(repo_name)

    def test_api_pull_error(self):
        id_, params = _build_data(self.apikey, 'pull',
                                  repoid=self.REPO,)
        response = api_call(self, params)

        expected = 'Unable to pull changes from `%s`' % self.REPO
        self._compare_error(id_, expected, given=response.body)

    def test_api_rescan_repos(self):
        id_, params = _build_data(self.apikey, 'rescan_repos')
        response = api_call(self, params)

        expected = {'added': [], 'removed': []}
        self._compare_ok(id_, expected, given=response.body)

    @mock.patch.object(ScmModel, 'repo_scan', crash)
    def test_api_rescann_error(self):
        id_, params = _build_data(self.apikey, 'rescan_repos',)
        response = api_call(self, params)

        expected = 'Error occurred during rescan repositories action'
        self._compare_error(id_, expected, given=response.body)

    def test_api_invalidate_cache(self):
        repo = RepoModel().get_by_repo_name(self.REPO)
        repo.scm_instance_cached() # seed cache

        id_, params = _build_data(self.apikey, 'invalidate_cache',
                                  repoid=self.REPO)
        response = api_call(self, params)

        expected = ("Caches of repository `%s` was invalidated" % (self.REPO))
        self._compare_ok(id_, expected, given=response.body)

    @mock.patch.object(ScmModel, 'mark_for_invalidation', crash)
    def test_api_invalidate_cache_error(self):
        id_, params = _build_data(self.apikey, 'invalidate_cache',
                                  repoid=self.REPO)
        response = api_call(self, params)

        expected = 'Error occurred during cache invalidation action'
        self._compare_error(id_, expected, given=response.body)

    def test_api_lock_repo_lock_aquire(self):
        id_, params = _build_data(self.apikey, 'lock',
                                  userid=TEST_USER_ADMIN_LOGIN,
                                  repoid=self.REPO,
                                  locked=True)
        response = api_call(self, params)
        expected = ('User `%s` set lock state for repo `%s` to `%s`'
                   % (TEST_USER_ADMIN_LOGIN, self.REPO, True))
        self._compare_ok(id_, expected, given=response.body)

    def test_api_lock_repo_lock_aquire_by_non_admin(self):
        repo_name = 'api_delete_me'
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE,
                            cur_user=self.TEST_USER_LOGIN)
        try:
            id_, params = _build_data(self.apikey_regular, 'lock',
                                      repoid=repo_name,
                                      locked=True)
            response = api_call(self, params)
            expected = ('User `%s` set lock state for repo `%s` to `%s`'
                       % (self.TEST_USER_LOGIN, repo_name, True))
            self._compare_ok(id_, expected, given=response.body)
        finally:
            fixture.destroy_repo(repo_name)

    def test_api_lock_repo_lock_aquire_non_admin_with_userid(self):
        repo_name = 'api_delete_me'
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE,
                            cur_user=self.TEST_USER_LOGIN)
        try:
            id_, params = _build_data(self.apikey_regular, 'lock',
                                      userid=TEST_USER_ADMIN_LOGIN,