Changeset - 1b4fc33931d7
Branch: beta
Marcin Kuzminski <marcin@python-works.com> - 2013-03-06 16:31:22
fixed some unicode problems with archive downloads
2 files changed with 3 insertions and 2 deletions:
0 comments (0 inline, 0 general)

rhodecode/controllers/files.py
@@ -354,193 +354,194 @@ class FilesController(BaseRepoController
            unix_mode = 0
            content = convert_line_endings(r_post.get('content'), unix_mode)

            message = r_post.get('message') or c.default_message
            location = r_post.get('location')
            filename = r_post.get('filename')
            file_obj = r_post.get('upload_file', None)

            if file_obj is not None and hasattr(file_obj, 'filename'):
                filename = file_obj.filename
                content = file_obj.file

            node_path = os.path.join(location, filename)
            author = self.rhodecode_user.full_contact

            if not content:
                h.flash(_('No content'), category='warning')
                return redirect(url('changeset_home', repo_name=c.repo_name,
                                    revision='tip'))
            if not filename:
                h.flash(_('No filename'), category='warning')
                return redirect(url('changeset_home', repo_name=c.repo_name,
                                    revision='tip'))

            try:
                self.scm_model.create_node(repo=c.rhodecode_repo,
                                           repo_name=repo_name, cs=c.cs,
                                           user=self.rhodecode_user.user_id,
                                           author=author, message=message,
                                           content=content, f_path=node_path)
                h.flash(_('Successfully committed to %s') % node_path,
                        category='success')
            except NodeAlreadyExistsError, e:
                h.flash(_(e), category='error')
            except Exception:
                log.error(traceback.format_exc())
                h.flash(_('Error occurred during commit'), category='error')
            return redirect(url('changeset_home',
                                repo_name=c.repo_name, revision='tip'))

        return render('files/files_add.html')

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                   'repository.admin')
    def archivefile(self, repo_name, fname):

        fileformat = None
        revision = None
        ext = None
        subrepos = request.GET.get('subrepos') == 'true'

        for a_type, ext_data in settings.ARCHIVE_SPECS.items():
            archive_spec = fname.split(ext_data[1])
            if len(archive_spec) == 2 and archive_spec[1] == '':
                fileformat = a_type or ext_data[1]
                revision = archive_spec[0]
                ext = ext_data[1]

        try:
            dbrepo = RepoModel().get_by_repo_name(repo_name)
            if dbrepo.enable_downloads is False:
                return _('downloads disabled')

            if c.rhodecode_repo.alias == 'hg':
                # patch and reset hooks section of UI config to not run any
                # hooks on fetching archives with subrepos
                for k, v in c.rhodecode_repo._repo.ui.configitems('hooks'):
                    c.rhodecode_repo._repo.ui.setconfig('hooks', k, None)

            cs = c.rhodecode_repo.get_changeset(revision)
            content_type = settings.ARCHIVE_SPECS[fileformat][0]
        except ChangesetDoesNotExistError:
            return _('Unknown revision %s') % revision
        except EmptyRepositoryError:
            return _('Empty repository')
        except (ImproperArchiveTypeError, KeyError):
            return _('Unknown archive type')

        fd, archive = tempfile.mkstemp()
        t = open(archive, 'wb')
        cs.fill_archive(stream=t, kind=fileformat, subrepos=subrepos)
        t.close()

        def get_chunked_archive(archive):
            stream = open(archive, 'rb')
            while True:
                data = stream.read(16 * 1024)
                if not data:
                    stream.close()
                    os.close(fd)
                    os.remove(archive)
                    break
                yield data

        response.content_disposition = str('attachment; filename=%s-%s%s' \
-                                           % (repo_name, revision[:12], ext))
+                                           % (safe_str(repo_name),
+                                              safe_str(revision), ext))
        response.content_type = str(content_type)
        return get_chunked_archive(archive)

    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                   'repository.admin')
    def diff(self, repo_name, f_path):
        ignore_whitespace = request.GET.get('ignorews') == '1'
        line_context = request.GET.get('context', 3)
        diff1 = request.GET.get('diff1', '')
        diff2 = request.GET.get('diff2', '')
        c.action = request.GET.get('diff')
        c.no_changes = diff1 == diff2
        c.f_path = f_path
        c.big_diff = False
        c.anchor_url = anchor_url
        c.ignorews_url = _ignorews_url
        c.context_url = _context_url
        c.changes = OrderedDict()
        c.changes[diff2] = []

        #special case if we want a show rev only, it's impl here
        #to reduce JS and callbacks

        if request.GET.get('show_rev'):
            if str2bool(request.GET.get('annotate', 'False')):
                _url = url('files_annotate_home', repo_name=c.repo_name,
                           revision=diff1, f_path=c.f_path)
            else:
                _url = url('files_home', repo_name=c.repo_name,
                           revision=diff1, f_path=c.f_path)

            return redirect(_url)
        try:
            if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]:
                c.changeset_1 = c.rhodecode_repo.get_changeset(diff1)
                try:
                    node1 = c.changeset_1.get_node(f_path)
                    if node1.is_dir():
                        raise NodeError('%s path is a %s not a file' % (node1, type(node1)))
                except NodeDoesNotExistError:
                    c.changeset_1 = EmptyChangeset(cs=diff1,
                                                   revision=c.changeset_1.revision,
                                                   repo=c.rhodecode_repo)
                    node1 = FileNode(f_path, '', changeset=c.changeset_1)
            else:
                c.changeset_1 = EmptyChangeset(repo=c.rhodecode_repo)
                node1 = FileNode(f_path, '', changeset=c.changeset_1)

            if diff2 not in ['', None, 'None', '0' * 12, '0' * 40]:
                c.changeset_2 = c.rhodecode_repo.get_changeset(diff2)
                try:
                    node2 = c.changeset_2.get_node(f_path)
                    if node2.is_dir():
                        raise NodeError('%s path is a %s not a file' % (node2, type(node2)))
                except NodeDoesNotExistError:
                    c.changeset_2 = EmptyChangeset(cs=diff2,
                                                   revision=c.changeset_2.revision,
                                                   repo=c.rhodecode_repo)
                    node2 = FileNode(f_path, '', changeset=c.changeset_2)
            else:
                c.changeset_2 = EmptyChangeset(repo=c.rhodecode_repo)
                node2 = FileNode(f_path, '', changeset=c.changeset_2)
        except (RepositoryError, NodeError):
            log.error(traceback.format_exc())
            return redirect(url('files_home', repo_name=c.repo_name,
                                f_path=f_path))

        if c.action == 'download':
            _diff = diffs.get_gitdiff(node1, node2,
                                      ignore_whitespace=ignore_whitespace,
                                      context=line_context)
            diff = diffs.DiffProcessor(_diff, format='gitdiff')

            diff_name = '%s_vs_%s.diff' % (diff1, diff2)
            response.content_type = 'text/plain'
            response.content_disposition = (
                'attachment; filename=%s' % diff_name
            )
            return diff.as_raw()

        elif c.action == 'raw':
            _diff = diffs.get_gitdiff(node1, node2,
                                      ignore_whitespace=ignore_whitespace,
                                      context=line_context)
            diff = diffs.DiffProcessor(_diff, format='gitdiff')
            response.content_type = 'text/plain'
            return diff.as_raw()

        else:
            fid = h.FID(diff2, node2.path)
            line_context_lcl = get_line_ctx(fid, request.GET)
            ign_whitespace_lcl = get_ignore_ws(fid, request.GET)

            lim = request.GET.get('fulldiff') or self.cut_off_limit
            _, cs1, cs2, diff, st = diffs.wrapped_diff(filenode_old=node1,
                                         filenode_new=node2,
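
Note on the change above: repo_name and revision can be unicode objects, and %-formatting them into the byte-string Content-Disposition header promotes the result to unicode, so the str() call falls back to the ASCII codec and can raise UnicodeEncodeError for non-ASCII repository names; wrapping the values in safe_str keeps the header a plain byte string, which the Python 2 WSGI response expects. A minimal Python 2 sketch of that failure mode, with a simplified stand-in for RhodeCode's safe_str helper and a made-up repository name:

    def safe_str(value, to_encoding='utf8'):
        # simplified stand-in for RhodeCode's safe_str helper
        if isinstance(value, unicode):
            return value.encode(to_encoding)
        return str(value)

    repo_name = u'repo-\u017c\xf3\u0142w'   # hypothetical unicode repository name
    revision = u'1b4fc33931d7'

    try:
        # bytes template + unicode args -> unicode result; str() then
        # encodes with ASCII and fails on the non-ASCII characters
        header = str('attachment; filename=%s-%s.zip' % (repo_name, revision))
    except UnicodeEncodeError:
        pass

    # encoding up front keeps the header a plain UTF-8 byte string
    header = str('attachment; filename=%s-%s.zip'
                 % (safe_str(repo_name), safe_str(revision)))
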
rhodecode/model/db.py
@@ -1641,193 +1641,193 @@ class CacheInvalidation(Base, BaseModel)
 
        self.cache_args = cache_args
        self.cache_active = False

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (self.__class__.__name__,
                                  self.cache_id, self.cache_key)

    @property
    def prefix(self):
        _split = self.cache_key.split(self.cache_args, 1)
        if _split and len(_split) == 2:
            return _split[0]
        return ''

    @classmethod
    def clear_cache(cls):
        cls.query().delete()

    @classmethod
    def _get_key(cls, key):
        """
        Wrapper for generating a key, together with a prefix

        :param key:
        """
        import rhodecode
        prefix = ''
        org_key = key
        iid = rhodecode.CONFIG.get('instance_id')
        if iid:
            prefix = iid

        return "%s%s" % (prefix, key), prefix, org_key
 

	
 
    @classmethod
    def get_by_key(cls, key):
        return cls.query().filter(cls.cache_key == key).scalar()

    @classmethod
    def get_by_repo_name(cls, repo_name):
        return cls.query().filter(cls.cache_args == repo_name).all()

    @classmethod
    def _get_or_create_key(cls, key, repo_name, commit=True):
        inv_obj = Session().query(cls).filter(cls.cache_key == key).scalar()
        if not inv_obj:
            try:
                inv_obj = CacheInvalidation(key, repo_name)
                Session().add(inv_obj)
                if commit:
                    Session().commit()
            except Exception:
                log.error(traceback.format_exc())
                Session().rollback()
        return inv_obj

    @classmethod
    def invalidate(cls, key):
        """
        Returns Invalidation object if this given key should be invalidated
        None otherwise. `cache_active = False` means that this cache
        state is not valid and needs to be invalidated

        :param key:
        """
        repo_name = key
        repo_name = remove_suffix(repo_name, '_README')
        repo_name = remove_suffix(repo_name, '_RSS')
        repo_name = remove_suffix(repo_name, '_ATOM')

        # adds instance prefix
        key, _prefix, _org_key = cls._get_key(key)
        inv = cls._get_or_create_key(key, repo_name)

        if inv and inv.cache_active is False:
            return inv

    @classmethod
    def set_invalidate(cls, key=None, repo_name=None):
        """
        Mark this Cache key for invalidation, either by key or whole
        cache sets based on repo_name

        :param key:
        """
        invalidated_keys = []
        if key:
            key, _prefix, _org_key = cls._get_key(key)
            inv_objs = Session().query(cls).filter(cls.cache_key == key).all()
        elif repo_name:
            inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()

        try:
            for inv_obj in inv_objs:
                inv_obj.cache_active = False
                log.debug('marking %s key for invalidation based on key=%s,repo_name=%s'
-                  % (inv_obj, key, repo_name))
+                  % (inv_obj, key, safe_str(repo_name)))
                invalidated_keys.append(inv_obj.cache_key)
                Session().add(inv_obj)
            Session().commit()
        except Exception:
            log.error(traceback.format_exc())
            Session().rollback()
        return invalidated_keys
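
Note on the set_invalidate change above: as with the archive filename, repo_name can be unicode here, and %-formatting it into the byte-string debug message promotes the whole message to unicode, which the logging handler then has to encode implicitly when it writes; depending on the stream that can end in UnicodeEncodeError. Encoding with safe_str up front keeps the message a byte string. A short Python 2 sketch, again with a simplified stand-in for safe_str and a made-up repository name:

    import logging
    logging.basicConfig(level=logging.DEBUG)
    log = logging.getLogger('rhodecode.model.db')

    def safe_str(value, to_encoding='utf8'):
        # simplified stand-in for RhodeCode's safe_str helper
        if isinstance(value, unicode):
            return value.encode(to_encoding)
        return str(value)

    repo_name = u'projekt-\u0142o\u015b'   # hypothetical unicode repository name

    msg = 'marking key for invalidation based on repo_name=%s' % safe_str(repo_name)
    assert isinstance(msg, str)   # stays a plain byte string, not unicode
    log.debug(msg)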
 

	
 
    @classmethod
    def set_valid(cls, key):
        """
        Mark this cache key as active and currently cached

        :param key:
        """
        inv_obj = cls.get_by_key(key)
        inv_obj.cache_active = True
        Session().add(inv_obj)
        Session().commit()

    @classmethod
    def get_cache_map(cls):

        class cachemapdict(dict):

            def __init__(self, *args, **kwargs):
                fixkey = kwargs.get('fixkey')
                if fixkey:
                    del kwargs['fixkey']
                self.fixkey = fixkey
                super(cachemapdict, self).__init__(*args, **kwargs)

            def __getattr__(self, name):
                key = name
                if self.fixkey:
                    key, _prefix, _org_key = cls._get_key(key)
                if key in self.__dict__:
                    return self.__dict__[key]
                else:
                    return self[key]

            def __getitem__(self, key):
                if self.fixkey:
                    key, _prefix, _org_key = cls._get_key(key)
                try:
                    return super(cachemapdict, self).__getitem__(key)
                except KeyError:
                    return

        cache_map = cachemapdict(fixkey=True)
        for obj in cls.query().all():
            cache_map[obj.cache_key] = cachemapdict(obj.get_dict())
        return cache_map


class ChangesetComment(Base, BaseModel):
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'},
    )
    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText(25000), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
    pull_request = relationship('PullRequest', lazy='joined')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns user associated with this ChangesetComment. ie those
        who actually commented

        :param cls:
        :param revision:
        """
        q = Session().query(User)\
                .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()
 

	