Changeset - 6fe3d405ff48
[Not reviewed]
default
Mads Kiilerich - 2019-11-14 22:58:55
mads@kiilerich.com
caching: clarify that arguments to internal @cache_region functions are only used as the caching key
2 files changed with 3 insertions and 3 deletions:
0 comments (0 inline, 0 general)
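
The rename documents a Beaker behaviour rather than changing one: for a function decorated with @cache_region, the positional arguments of each call (together with the decorator's extra namespace argument, here '_get_feed_from_cache') are turned into the cache key before the body runs, so the body never needs to read them. A minimal stand-alone sketch of that behaviour, using a hypothetical in-memory 'long_term' region and a made-up repository name rather than Kallithea's real configuration:

from beaker.cache import cache_region, cache_regions

# Hypothetical region setup so the sketch runs outside Kallithea; the real
# 'long_term' region is configured from the application's .ini file.
cache_regions.update({
    'long_term': {'type': 'memory', 'expire': 3600},
})

@cache_region('long_term', '_get_feed_from_cache')
def _get_feed_from_cache(*_cache_keys):  # parameters are not really used - only as caching key
    # Beaker has already turned the caller's arguments into the cache key by
    # the time this body runs, so nothing in here looks at _cache_keys.
    print('building feed ...')
    return '<feed/>'

_get_feed_from_cache('myrepo', 'ATOM')  # computed and cached under '... myrepo ATOM'
_get_feed_from_cache('myrepo', 'ATOM')  # same arguments -> answered from the cache, no print
_get_feed_from_cache('myrepo', 'RSS')   # different arguments -> different key -> computed again
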
kallithea/controllers/feed.py
@@ -86,81 +86,81 @@ class FeedController(BaseRepoController)
                            % st)
        if diff_processor.limited_diff:
            changes = changes + ['\n ' +
                                 _('Changeset was too big and was cut off...')]

        # rev link
        _url = h.canonical_url('changeset_home', repo_name=c.db_repo.repo_name,
                   revision=cs.raw_id)
        desc_msg.append('changeset: <a href="%s">%s</a>' % (_url, cs.raw_id[:8]))

        desc_msg.append('<pre>')
        desc_msg.append(h.urlify_text(cs.message))
        desc_msg.append('\n')
        desc_msg.extend(changes)
        if str2bool(CONFIG.get('rss_include_diff', False)):
            desc_msg.append('\n\n')
            desc_msg.append(raw_diff)
        desc_msg.append('</pre>')
        return map(safe_unicode, desc_msg)

    def atom(self, repo_name):
        """Produce an atom-1.0 feed via feedgenerator module"""

        @cache_region('long_term', '_get_feed_from_cache')
-        def _get_feed_from_cache(key, kind):
+        def _get_feed_from_cache(*_cache_keys):  # parameters are not really used - only as caching key
            feed = Atom1Feed(
                title=_('%s %s feed') % (c.site_name, repo_name),
                link=h.canonical_url('summary_home', repo_name=repo_name),
                description=_('Changes on %s repository') % repo_name,
                language=language,
                ttl=ttl
            )

            rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
            for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])):
                feed.add_item(title=self._get_title(cs),
                              link=h.canonical_url('changeset_home', repo_name=repo_name,
                                       revision=cs.raw_id),
                              author_name=cs.author,
                              description=''.join(self.__get_desc(cs)),
                              pubdate=cs.date,
                              )

            response.content_type = feed.mime_type
            return feed.writeString('utf-8')

        kind = 'ATOM'
        valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
        if not valid:
            region_invalidate(_get_feed_from_cache, None, '_get_feed_from_cache', repo_name, kind)
        return _get_feed_from_cache(repo_name, kind)

    def rss(self, repo_name):
        """Produce an rss2 feed via feedgenerator module"""

        @cache_region('long_term', '_get_feed_from_cache')
-        def _get_feed_from_cache(key, kind):
+        def _get_feed_from_cache(*_cache_keys):  # parameters are not really used - only as caching key
            feed = Rss201rev2Feed(
                title=_('%s %s feed') % (c.site_name, repo_name),
                link=h.canonical_url('summary_home', repo_name=repo_name),
                description=_('Changes on %s repository') % repo_name,
                language=language,
                ttl=ttl
            )

            rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
            for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])):
                feed.add_item(title=self._get_title(cs),
                              link=h.canonical_url('changeset_home', repo_name=repo_name,
                                       revision=cs.raw_id),
                              author_name=cs.author,
                              description=''.join(self.__get_desc(cs)),
                              pubdate=cs.date,
                             )

            response.content_type = feed.mime_type
            return feed.writeString('utf-8')

        kind = 'RSS'
        valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
        if not valid:
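
Invalidation, as in atom() and rss() above, has to repeat exactly the values that built the key: the decorator's extra '_get_feed_from_cache' argument plus repo_name and kind. Continuing the sketch shown after the changeset header (same hypothetical region and repository name):

from beaker.cache import region_invalidate

# Drop only the entry keyed by ('_get_feed_from_cache', 'myrepo', 'ATOM');
# passing None for the region lets Beaker pick it up from the decoration.
region_invalidate(_get_feed_from_cache, None,
                  '_get_feed_from_cache', 'myrepo', 'ATOM')
_get_feed_from_cache('myrepo', 'ATOM')  # key was invalidated -> feed is rebuilt
_get_feed_from_cache('myrepo', 'RSS')   # other entry untouched -> still served from cache
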
kallithea/controllers/summary.py
@@ -45,49 +45,49 @@ from kallithea.lib.celerylib.tasks impor
from kallithea.lib.compat import json
from kallithea.lib.markup_renderer import MarkupRenderer
from kallithea.lib.page import RepoPage
from kallithea.lib.utils2 import safe_int
from kallithea.lib.vcs.backends.base import EmptyChangeset
from kallithea.lib.vcs.exceptions import ChangesetError, EmptyRepositoryError, NodeDoesNotExistError
from kallithea.lib.vcs.nodes import FileNode
from kallithea.model.db import CacheInvalidation, Statistics


log = logging.getLogger(__name__)

README_FILES = [''.join([x[0][0], x[1][0]]) for x in
                    sorted(list(itertools.product(ALL_READMES, ALL_EXTS)),
                           key=lambda y:y[0][1] + y[1][1])]


class SummaryController(BaseRepoController):

    def __get_readme_data(self, db_repo):
        repo_name = db_repo.repo_name
        log.debug('Looking for README file')

        @cache_region('long_term', '_get_readme_from_cache')
-        def _get_readme_from_cache(key, kind):
+        def _get_readme_from_cache(*_cache_keys):  # parameters are not really used - only as caching key
            readme_data = None
            readme_file = None
            try:
                # gets the landing revision! or tip if fails
                cs = db_repo.get_landing_changeset()
                if isinstance(cs, EmptyChangeset):
                    raise EmptyRepositoryError()
                renderer = MarkupRenderer()
                for f in README_FILES:
                    try:
                        readme = cs.get_node(f)
                        if not isinstance(readme, FileNode):
                            continue
                        readme_file = f
                        log.debug('Found README file `%s` rendering...',
                                  readme_file)
                        readme_data = renderer.render(readme.content,
                                                      filename=f)
                        break
                    except NodeDoesNotExistError:
                        continue
            except ChangesetError:
                log.error(traceback.format_exc())
                pass