Changeset - 4c239e0dcbb7
[Not reviewed]
beta
Marcin Kuzminski - 2012-05-20 16:38:00
marcin@python-works.com
fixes issue #454 Search results under Windows include preceding backslash
2 files changed with 12 insertions and 8 deletions:
0 comments (0 inline, 0 general)
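The root cause of issue #454 sits in get_full_content() in rhodecode/lib/indexers/__init__.py (second file below): the indexed 'path' field holds an absolute filesystem path, and the old code stripped only forward slashes from the part following the repository name, so on Windows the relative path shown in search results kept a leading backslash. A minimal standalone sketch of the old versus the new behaviour follows; the repository location and file paths are made up, and the results in the comments assume the snippet runs on Windows, where os.sep is '\\':

    import os
    from os.path import join as jn

    # hypothetical stored Whoosh document fields on a Windows installation
    repo_location = 'C:\\repos'                            # base directory of all repositories
    res = {'repository': 'myrepo',
           'path': 'C:\\repos\\myrepo\\docs\\index.rst'}

    # old behaviour: slice off everything up to the repository name, strip '/' only
    f_path_old = res['path'][res['path'].find(res['repository']) +
                             len(res['repository']):].lstrip('/')
    # f_path_old == '\\docs\\index.rst'  -> leading backslash, issue #454

    # new behaviour: split on the full repository path and strip os.sep
    full_repo_path = jn(repo_location, res['repository'])  # 'C:\\repos\\myrepo' on Windows
    f_path_new = res['path'].split(full_repo_path)[-1].lstrip(os.sep)
    # f_path_new == 'docs\\index.rst'

The controller change in search.py feeds RepoModel().repos_path into the renamed WhooshResultWrapper as repo_location, so the wrapper can build full_repo_path exactly as above.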
rhodecode/controllers/search.py
@@ -9,56 +9,57 @@
    :author: marcink
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
    :license: GPLv3, see COPYING for more details.
"""
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
import logging
import traceback

from pylons.i18n.translation import _
from pylons import request, config, tmpl_context as c

from rhodecode.lib.auth import LoginRequired
from rhodecode.lib.base import BaseController, render
-from rhodecode.lib.indexers import SCHEMA, IDX_NAME, ResultWrapper
+from rhodecode.lib.indexers import SCHEMA, IDX_NAME, WhooshResultWrapper

from webhelpers.paginate import Page
from webhelpers.util import update_params

from whoosh.index import open_dir, EmptyIndexError
from whoosh.qparser import QueryParser, QueryParserError
from whoosh.query import Phrase, Wildcard, Term, Prefix
+from rhodecode.model.repo import RepoModel

log = logging.getLogger(__name__)


class SearchController(BaseController):

    @LoginRequired()
    def __before__(self):
        super(SearchController, self).__before__()

    def index(self, search_repo=None):
        c.repo_name = search_repo
        c.formated_results = []
        c.runtime = ''
        c.cur_query = request.GET.get('q', None)
        c.cur_type = request.GET.get('type', 'source')
        c.cur_search = search_type = {'content': 'content',
                                      'commit': 'content',
                                      'path': 'path',
                                      'repository': 'repository'}\
                                      .get(c.cur_type, 'content')

        if c.cur_query:
            cur_query = c.cur_query.lower()
@@ -78,49 +79,49 @@ class SearchController(BaseController):
                    query = qp.parse(unicode(cur_query))
                    # extract words for highlight
                    if isinstance(query, Phrase):
                        highlight_items.update(query.words)
                    elif isinstance(query, Prefix):
                        highlight_items.add(query.text)
                    else:
                        for i in query.all_terms():
                            if i[0] == 'content':
                                highlight_items.add(i[1])

                    matcher = query.matcher(searcher)

                    log.debug(query)
                    log.debug(highlight_items)
                    results = searcher.search(query)
                    res_ln = len(results)
                    c.runtime = '%s results (%.3f seconds)' % (
                        res_ln, results.runtime
                    )

                    def url_generator(**kw):
                        return update_params("?q=%s&type=%s" \
                                           % (c.cur_query, c.cur_search), **kw)

+                    repo_location = RepoModel().repos_path
                    c.formated_results = Page(
-                        ResultWrapper(search_type, searcher, matcher,
-                                      highlight_items),
+                        WhooshResultWrapper(search_type, searcher, matcher,
+                                            highlight_items, repo_location),
                        page=p,
                        item_count=res_ln,
                        items_per_page=10,
                        url=url_generator
                    )

                except QueryParserError:
                    c.runtime = _('Invalid search query. Try quoting it.')
                searcher.close()
            except (EmptyIndexError, IOError):
                log.error(traceback.format_exc())
                log.error('Empty Index data')
                c.runtime = _('There is no index to search in. '
                              'Please run whoosh indexer')
            except (Exception):
                log.error(traceback.format_exc())
                c.runtime = _('An error occurred during this search operation')


        # Return a rendered template
        return render('/search/search.html')
rhodecode/lib/indexers/__init__.py
@@ -108,99 +108,102 @@ class MakeIndex(BasePasterCommand):
            l.release()
        except LockHeld:
            sys.exit(1)

    def update_parser(self):
        self.parser.add_option('--repo-location',
                          action='store',
                          dest='repo_location',
                          help="Specifies repositories location to index OPTIONAL",
                          )
        self.parser.add_option('--index-only',
                          action='store',
                          dest='repo_list',
                          help="Specifies a comma separated list of repositores "
                                "to build index on OPTIONAL",
                          )
        self.parser.add_option('-f',
                          action='store_true',
                          dest='full_index',
                          help="Specifies that index should be made full i.e"
                                " destroy old and build from scratch",
                          default=False)


-class ResultWrapper(object):
-    def __init__(self, search_type, searcher, matcher, highlight_items):
+class WhooshResultWrapper(object):
+    def __init__(self, search_type, searcher, matcher, highlight_items,
+                 repo_location):
        self.search_type = search_type
        self.searcher = searcher
        self.matcher = matcher
        self.highlight_items = highlight_items
        self.fragment_size = 200
+        self.repo_location = repo_location

    @LazyProperty
    def doc_ids(self):
        docs_id = []
        while self.matcher.is_active():
            docnum = self.matcher.id()
            chunks = [offsets for offsets in self.get_chunks()]
            docs_id.append([docnum, chunks])
            self.matcher.next()
        return docs_id

    def __str__(self):
        return '<%s at %s>' % (self.__class__.__name__, len(self.doc_ids))

    def __repr__(self):
        return self.__str__()

    def __len__(self):
        return len(self.doc_ids)

    def __iter__(self):
        """
        Allows Iteration over results,and lazy generate content

        *Requires* implementation of ``__getitem__`` method.
        """
        for docid in self.doc_ids:
            yield self.get_full_content(docid)

    def __getitem__(self, key):
        """
        Slicing of resultWrapper
        """
        i, j = key.start, key.stop

        slices = []
        for docid in self.doc_ids[i:j]:
            slices.append(self.get_full_content(docid))
        return slices

    def get_full_content(self, docid):
        res = self.searcher.stored_fields(docid[0])
-        f_path = res['path'][res['path'].find(res['repository']) \
-                             + len(res['repository']):].lstrip('/')
+        full_repo_path = jn(self.repo_location, res['repository'])
+        f_path = res['path'].split(full_repo_path)[-1]
+        f_path = f_path.lstrip(os.sep)

        content_short = self.get_short_content(res, docid[1])
        res.update({'content_short': content_short,
                    'content_short_hl': self.highlight(content_short),
                    'f_path': f_path})

        return res

    def get_short_content(self, res, chunks):

        return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])

    def get_chunks(self):
        """
        Smart function that implements chunking the content
        but not overlap chunks so it doesn't highlight the same
        close occurrences twice.

        :param matcher:
        :param size:
        """
        memory = [(0, 0)]
        for span in self.matcher.spans():
            start = span.startchar or 0
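For context on how the snippets in the result list are assembled: doc_ids pairs each matching document with the (start, end) character offsets produced by get_chunks(), and get_short_content() simply concatenates those slices of the stored content. A tiny illustration with made-up content and offsets (plain Python, not RhodeCode code):

    # stands in for the 'content' field stored in the Whoosh index
    res = {'content': 'first line\nsecond line\nthird line\n'}
    chunks = [(0, 10), (23, 33)]

    # equivalent of WhooshResultWrapper.get_short_content(res, chunks)
    content_short = ''.join([res['content'][start:end] for start, end in chunks])
    # content_short == 'first linethird line'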