Changeset - e51ad2cd400e
kallithea/bin/kallithea_cli_repo.py
 
@@ -7,49 +7,49 @@
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
This file was forked by the Kallithea project in July 2014 and later moved.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Feb 9, 2013
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 
import datetime
 
import os
 
import re
 
import shutil
 

	
 
import click
 

	
 
import kallithea.bin.kallithea_cli_base as cli_base
 
from kallithea.lib.utils import REMOVED_REPO_PAT, repo2db_mapper
 
from kallithea.lib.utils2 import ask_ok, safe_str
 
from kallithea.lib.utils2 import ask_ok
 
from kallithea.model.db import Repository, Ui
 
from kallithea.model.meta import Session
 
from kallithea.model.scm import ScmModel
 

	
 

	
 
@cli_base.register_command(config_file_initialize_app=True)
 
@click.option('--remove-missing', is_flag=True,
 
        help='Remove missing repositories from the Kallithea database.')
 
def repo_scan(remove_missing):
 
    """Scan filesystem for repositories.
 

	
 
    Search the configured repository root for new repositories and add them
 
    into Kallithea.
 
    Additionally, report repositories that were previously known to Kallithea
 
    but are no longer present on the filesystem. If option --remove-missing is
 
    given, remove the missing repositories from the Kallithea database.
 
    """
 
    click.echo('Now scanning root location for new repos ...')
 
    added, removed = repo2db_mapper(ScmModel().repo_scan(),
 
                                    remove_obsolete=remove_missing)
 
    click.echo('Scan completed.')
 
    if added:
 
        click.echo('Added: %s' % ', '.join(added))
 
    if removed:
 
@@ -106,82 +106,81 @@ def repo_purge_deleted(ask, older_than):
 
    def _parse_older_than(val):
 
        regex = re.compile(r'((?P<days>\d+?)d)?((?P<hours>\d+?)h)?((?P<minutes>\d+?)m)?((?P<seconds>\d+?)s)?')
 
        parts = regex.match(val)
 
        if not parts:
 
            return
 
        parts = parts.groupdict()
 
        time_params = {}
 
        for name, param in parts.items():
 
            if param:
 
                time_params[name] = int(param)
 
        return datetime.timedelta(**time_params)
 

	
 
    def _extract_date(name):
 
        """
 
        Extract the date part from rm__<date> pattern of removed repos,
 
        and convert it to a datetime object
 

	
 
        :param name:
 
        """
 
        date_part = name[4:19]  # 4:19 since we don't parse milliseconds
 
        return datetime.datetime.strptime(date_part, '%Y%m%d_%H%M%S')
 

	
 
    repos_location = Ui.get_repos_location()
 
    to_remove = []
 
    for dn_, dirs, f in os.walk(safe_str(repos_location)):
 
    for dn_, dirs, f in os.walk(repos_location):
 
        alldirs = list(dirs)
 
        del dirs[:]
 
        if ('.hg' in alldirs or
 
            '.git' in alldirs or
 
            '.svn' in alldirs or
 
            'objects' in alldirs and ('refs' in alldirs or 'packed-refs' in f)
 
        ):
 
            continue
 
        for loc in alldirs:
 
            if REMOVED_REPO_PAT.match(loc):
 
                to_remove.append([os.path.join(dn_, loc),
 
                                  _extract_date(loc)])
 
            else:
 
                dirs.append(loc)
 
        if dirs:
 
            click.echo('Scanning: %s' % dn_)
 

	
 
    if not to_remove:
 
        click.echo('There are no deleted repositories.')
 
        return
 

	
 
    # filter older than (if present)!
 
    if older_than:
 
        now = datetime.datetime.now()
 
        to_remove_filtered = []
 
        older_than_date = _parse_older_than(older_than)
 
        for name, date_ in to_remove:
 
            repo_age = now - date_
 
            if repo_age > older_than_date:
 
                to_remove_filtered.append([name, date_])
 

	
 
        to_remove = to_remove_filtered
 

	
 
        if not to_remove:
 
            click.echo('There are no deleted repositories older than %s (%s)'
 
                    % (older_than, older_than_date))
 
            return
 

	
 
        click.echo('Considering %s deleted repositories older than %s (%s).'
 
            % (len(to_remove), older_than, older_than_date))
 
    else:
 
        click.echo('Considering %s deleted repositories.' % len(to_remove))
 

	
 
    if not ask:
 
        remove = True
 
    else:
 
        remove = ask_ok('The following repositories will be removed completely:\n%s\n'
 
                'Do you want to proceed? [y/n] '
 
                % '\n'.join(['%s deleted on %s' % (safe_str(x[0]), safe_str(x[1]))
 
                                     for x in to_remove]))
 
            'Do you want to proceed? [y/n] ' %
 
            '\n'.join('%s deleted on %s' % (path, date_) for path, date_ in to_remove))
 

	
 
    if remove:
 
        for path, date_ in to_remove:
 
            click.echo('Purging repository %s' % path)
 
            shutil.rmtree(path)
 
    else:
 
        click.echo('Nothing done, exiting...')
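Note: the two helpers in this hunk are easy to exercise on their own. Below is a minimal sketch of the duration parsing used by --older-than and the rm__<date> extraction; the function names and the sample values are illustrative, only the regex and the [4:19] slice come from the code above.

    import datetime
    import re

    # same pattern as _parse_older_than: optional days/hours/minutes/seconds parts
    OLDER_THAN_RE = re.compile(r'((?P<days>\d+?)d)?((?P<hours>\d+?)h)?((?P<minutes>\d+?)m)?((?P<seconds>\d+?)s)?')

    def parse_older_than(val):
        """Turn a value like '2d12h' into a datetime.timedelta."""
        parts = OLDER_THAN_RE.match(val)
        if not parts:
            return None
        time_params = {name: int(param)
                       for name, param in parts.groupdict().items() if param}
        return datetime.timedelta(**time_params)

    def extract_date(name):
        """Parse the timestamp out of a 'rm__YYYYMMDD_HHMMSS...' directory name."""
        return datetime.datetime.strptime(name[4:19], '%Y%m%d_%H%M%S')

    print(parse_older_than('2d12h'))               # 2 days, 12:00:00
    print(extract_date('rm__20240131_235959__x'))  # 2024-01-31 23:59:59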
kallithea/controllers/api/__init__.py
 
@@ -18,63 +18,63 @@ kallithea.controllers.api
 
JSON RPC controller
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Aug 20, 2011
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import inspect
 
import itertools
 
import logging
 
import time
 
import traceback
 
import types
 

	
 
from tg import Response, TGController, request, response
 
from webob.exc import HTTPError, HTTPException
 

	
 
from kallithea.lib import ext_json
 
from kallithea.lib.auth import AuthUser
 
from kallithea.lib.base import _get_ip_addr as _get_ip
 
from kallithea.lib.base import get_path_info
 
from kallithea.lib.utils2 import ascii_bytes, safe_str
 
from kallithea.lib.utils2 import ascii_bytes
 
from kallithea.model.db import User
 

	
 

	
 
log = logging.getLogger('JSONRPC')
 

	
 

	
 
class JSONRPCError(BaseException):
 

	
 
    def __init__(self, message):
 
        self.message = message
 
        super(JSONRPCError, self).__init__()
 

	
 
    def __str__(self):
 
        return safe_str(self.message)
 
        return self.message
 

	
 

	
 
class JSONRPCErrorResponse(Response, HTTPException):
 
    """
 
    Generate a Response object with a JSON-RPC error body
 
    """
 

	
 
    def __init__(self, message=None, retid=None, code=None):
 
        HTTPException.__init__(self, message, self)
 
        Response.__init__(self,
 
                          json_body=dict(id=retid, result=None, error=message),
 
                          status=code,
 
                          content_type='application/json')
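Note: for reference, the JSON body produced by this class always carries the id/result/error triple, with result forced to None. A short sketch of the wire format (the id and message are made-up values):

    import json

    # shape of the body built by JSONRPCErrorResponse(message=..., retid=..., code=...)
    body = dict(id='example-call-1', result=None, error='Invalid method parameters')
    print(json.dumps(body))
    # {"id": "example-call-1", "result": null, "error": "Invalid method parameters"}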
 

	
 

	
 
class JSONRPCController(TGController):
 
    """
 
     A WSGI-speaking JSON-RPC controller class
 

	
 
     See the specification:
 
     <http://json-rpc.org/wiki/specification>.
 

	
 
     Valid controller return values should be json-serializable objects.
 

	
kallithea/controllers/compare.py
 
@@ -22,49 +22,49 @@ This file was forked by the Kallithea pr
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: May 6, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 

	
 
import logging
 
import re
 

	
 
import mercurial.unionrepo
 
from tg import request
 
from tg import tmpl_context as c
 
from tg.i18n import ugettext as _
 
from webob.exc import HTTPBadRequest, HTTPFound, HTTPNotFound
 

	
 
from kallithea.config.routing import url
 
from kallithea.controllers.changeset import _context_url, _ignorews_url
 
from kallithea.lib import diffs
 
from kallithea.lib import helpers as h
 
from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
 
from kallithea.lib.base import BaseRepoController, render
 
from kallithea.lib.graphmod import graph_data
 
from kallithea.lib.utils2 import ascii_bytes, ascii_str, safe_bytes, safe_int, safe_str
 
from kallithea.lib.utils2 import ascii_bytes, ascii_str, safe_bytes, safe_int
 
from kallithea.model.db import Repository
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class CompareController(BaseRepoController):
 

	
 
    def _before(self, *args, **kwargs):
 
        super(CompareController, self)._before(*args, **kwargs)
 

	
 
        # The base repository has already been retrieved.
 
        c.a_repo = c.db_repo
 

	
 
        # Retrieve the "changeset" repository (default: same as base).
 
        other_repo = request.GET.get('other_repo', None)
 
        if other_repo is None:
 
            c.cs_repo = c.a_repo
 
        else:
 
            c.cs_repo = Repository.get_by_repo_name(other_repo)
 
            if c.cs_repo is None:
 
                msg = _('Could not find other repository %s') % other_repo
 
                h.flash(msg, category='error')
 
                raise HTTPFound(location=url('compare_home', repo_name=c.a_repo.repo_name))
 
@@ -114,52 +114,52 @@ class CompareController(BaseRepoControll
 
            else:
 
                log.debug("no shortcut found: %s is not an ancestor of %s", other_rev, org_rev)
 
                ancestors = [ascii_str(hgrepo[ancestor].hex()) for ancestor in
 
                             hgrepo.revs(b"heads(::id(%s) & ::id(%s))", ascii_bytes(org_rev), ascii_bytes(other_rev))] # FIXME: expensive!
 

	
 
            other_changesets = [
 
                other_repo.get_changeset(rev)
 
                for rev in hgrepo.revs(
 
                    b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
 
                    ascii_bytes(other_rev), ascii_bytes(org_rev), ascii_bytes(org_rev))
 
            ]
 
            org_changesets = [
 
                org_repo.get_changeset(ascii_str(hgrepo[rev].hex()))
 
                for rev in hgrepo.revs(
 
                    b"ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
 
                    ascii_bytes(org_rev), ascii_bytes(other_rev), ascii_bytes(other_rev))
 
            ]
 

	
 
        elif alias == 'git':
 
            if org_repo != other_repo:
 
                from dulwich.repo import Repo
 
                from dulwich.client import SubprocessGitClient
 

	
 
                gitrepo = Repo(org_repo.path)
 
                SubprocessGitClient(thin_packs=False).fetch(safe_str(other_repo.path), gitrepo)
 
                SubprocessGitClient(thin_packs=False).fetch(other_repo.path, gitrepo)
 

	
 
                gitrepo_remote = Repo(other_repo.path)
 
                SubprocessGitClient(thin_packs=False).fetch(safe_str(org_repo.path), gitrepo_remote)
 
                SubprocessGitClient(thin_packs=False).fetch(org_repo.path, gitrepo_remote)
 

	
 
                revs = [
 
                    ascii_str(x.commit.id)
 
                    for x in gitrepo_remote.get_walker(include=[ascii_bytes(other_rev)],
 
                                                       exclude=[ascii_bytes(org_rev)])
 
                ]
 
                other_changesets = [other_repo.get_changeset(rev) for rev in reversed(revs)]
 
                if other_changesets:
 
                    ancestors = [other_changesets[0].parents[0].raw_id]
 
                else:
 
                    # no changesets from other repo, ancestor is the other_rev
 
                    ancestors = [other_rev]
 

	
 
                gitrepo.close()
 
                gitrepo_remote.close()
 

	
 
            else:
 
                so = org_repo.run_git_command(
 
                    ['log', '--reverse', '--pretty=format:%H',
 
                     '-s', '%s..%s' % (org_rev, other_rev)]
 
                )
 
                other_changesets = [org_repo.get_changeset(cs)
 
                              for cs in re.findall(r'[0-9a-fA-F]{40}', so)]
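Note: the Git branch of this comparison scrapes changeset hashes out of `git log --reverse --pretty=format:%H` output. A self-contained sketch of just that parsing step, with canned output standing in for the git call:

    import re

    # stand-in for the output of: git log --reverse --pretty=format:%H <org_rev>..<other_rev>
    so = ('a3f5c0d9e8b7a6c5d4e3f2a1b0c9d8e7f6a5b4c3\n'
          '1234567890abcdef1234567890abcdef12345678')

    # same extraction as above: every 40-character hex string is a commit id
    revs = re.findall(r'[0-9a-fA-F]{40}', so)
    print(revs)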
 
                so = org_repo.run_git_command(
kallithea/controllers/files.py
 
@@ -25,49 +25,49 @@ Original author and date, and relevant c
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 
import os
 
import posixpath
 
import shutil
 
import tempfile
 
import traceback
 
from collections import OrderedDict
 

	
 
from tg import request, response
 
from tg import tmpl_context as c
 
from tg.i18n import ugettext as _
 
from webob.exc import HTTPFound, HTTPNotFound
 

	
 
from kallithea.config.routing import url
 
from kallithea.controllers.changeset import _context_url, _ignorews_url, anchor_url, get_ignore_ws, get_line_ctx
 
from kallithea.lib import diffs
 
from kallithea.lib import helpers as h
 
from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
 
from kallithea.lib.base import BaseRepoController, jsonify, render
 
from kallithea.lib.exceptions import NonRelativePathError
 
from kallithea.lib.utils import action_logger
 
from kallithea.lib.utils2 import convert_line_endings, detect_mode, safe_int, safe_str, safe_unicode, str2bool
 
from kallithea.lib.utils2 import convert_line_endings, detect_mode, safe_int, safe_unicode, str2bool
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 
from kallithea.lib.vcs.conf import settings
 
from kallithea.lib.vcs.exceptions import (
 
    ChangesetDoesNotExistError, ChangesetError, EmptyRepositoryError, ImproperArchiveTypeError, NodeAlreadyExistsError, NodeDoesNotExistError, NodeError, RepositoryError, VCSError)
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.model.db import Repository
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.scm import ScmModel
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class FilesController(BaseRepoController):
 

	
 
    def _before(self, *args, **kwargs):
 
        super(FilesController, self)._before(*args, **kwargs)
 

	
 
    def __get_cs(self, rev, silent_empty=False):
 
        """
 
        Safe way to get a changeset: if an error occurs, it redirects to tip with a
 
        proper message
 

	
 
        :param rev: revision to fetch
 
@@ -211,95 +211,94 @@ class FilesController(BaseRepoController
 
            data = {
 
                'more': False,
 
                'results': res
 
            }
 
            return data
 

	
 
    @LoginRequired(allow_default_user=True)
 
    @HasRepoPermissionLevelDecorator('read')
 
    def authors(self, repo_name, revision, f_path):
 
        changeset = self.__get_cs(revision)
 
        _file = changeset.get_node(f_path)
 
        if _file.is_file():
 
            file_history, _hist = self._get_node_history(changeset, f_path)
 
            c.authors = []
 
            for a in set([x.author for x in _hist]):
 
                c.authors.append((h.email(a), h.person(a)))
 
            return render('files/files_history_box.html')
 

	
 
    @LoginRequired(allow_default_user=True)
 
    @HasRepoPermissionLevelDecorator('read')
 
    def rawfile(self, repo_name, revision, f_path):
 
        cs = self.__get_cs(revision)
 
        file_node = self.__get_filenode(cs, f_path)
 

	
 
        response.content_disposition = 'attachment; filename=%s' % \
 
            safe_str(f_path.split(Repository.url_sep())[-1])
 
        response.content_disposition = \
 
            'attachment; filename=%s' % f_path.split(Repository.url_sep())[-1]
 

	
 
        response.content_type = file_node.mimetype
 
        return file_node.content
 

	
 
    @LoginRequired(allow_default_user=True)
 
    @HasRepoPermissionLevelDecorator('read')
 
    def raw(self, repo_name, revision, f_path):
 
        cs = self.__get_cs(revision)
 
        file_node = self.__get_filenode(cs, f_path)
 

	
 
        raw_mimetype_mapping = {
 
            # map original mimetype to a mimetype used for "show as raw"
 
            # you can also provide a content-disposition to override the
 
            # default "attachment" disposition.
 
            # orig_type: (new_type, new_dispo)
 

	
 
            # show images inline:
 
            'image/x-icon': ('image/x-icon', 'inline'),
 
            'image/png': ('image/png', 'inline'),
 
            'image/gif': ('image/gif', 'inline'),
 
            'image/jpeg': ('image/jpeg', 'inline'),
 
            'image/svg+xml': ('image/svg+xml', 'inline'),
 
        }
 

	
 
        mimetype = file_node.mimetype
 
        try:
 
            mimetype, dispo = raw_mimetype_mapping[mimetype]
 
        except KeyError:
 
            # we don't know anything special about this, handle it safely
 
            if file_node.is_binary:
 
                # do same as download raw for binary files
 
                mimetype, dispo = 'application/octet-stream', 'attachment'
 
            else:
 
                # do not just use the original mimetype, but force text/plain,
 
                # otherwise it would serve text/html and that might be unsafe.
 
                # Note: underlying vcs library fakes text/plain mimetype if the
 
                # mimetype can not be determined and it thinks it is not
 
                # binary. This might lead to erroneous text display in some
 
                # cases, but helps in other cases, like with text files
 
                # without extension.
 
                mimetype, dispo = 'text/plain', 'inline'
 

	
 
        if dispo == 'attachment':
 
            dispo = 'attachment; filename=%s' % \
 
                        safe_str(f_path.split(os.sep)[-1])
 
            dispo = 'attachment; filename=%s' % f_path.split(os.sep)[-1]
 

	
 
        response.content_disposition = dispo
 
        response.content_type = mimetype
 
        return file_node.content
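Note: the mapping-plus-fallback above reduces to a small decision function. A hedged sketch of the same rules (the helper name and sample calls are mine; the rules follow the comments in this hunk):

    # map original mimetype to (mimetype used for "show as raw", content disposition)
    RAW_MIMETYPE_MAPPING = {
        'image/x-icon': ('image/x-icon', 'inline'),
        'image/png': ('image/png', 'inline'),
        'image/gif': ('image/gif', 'inline'),
        'image/jpeg': ('image/jpeg', 'inline'),
        'image/svg+xml': ('image/svg+xml', 'inline'),
    }

    def raw_headers(mimetype, is_binary):
        """Pick (content type, disposition) for serving a file node 'raw'."""
        try:
            return RAW_MIMETYPE_MAPPING[mimetype]
        except KeyError:
            if is_binary:
                # unknown binary content: force a download
                return 'application/octet-stream', 'attachment'
            # unknown text content: force text/plain so it is never rendered as HTML
            return 'text/plain', 'inline'

    print(raw_headers('image/png', False))       # ('image/png', 'inline')
    print(raw_headers('text/html', False))       # ('text/plain', 'inline')
    print(raw_headers('application/zip', True))  # ('application/octet-stream', 'attachment')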
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionLevelDecorator('write')
 
    def delete(self, repo_name, revision, f_path):
 
        repo = c.db_repo
 
        # check if revision is a branch identifier - basically we cannot
 
        # create multiple heads via file editing
 
        _branches = repo.scm_instance.branches
 
        # check if revision is a branch name or branch hash
 
        if revision not in _branches and revision not in _branches.values():
 
            h.flash(_('You can only delete files with revision '
 
                      'being a valid branch'), category='warning')
 
            raise HTTPFound(location=h.url('files_home',
 
                                  repo_name=repo_name, revision='tip',
 
                                  f_path=f_path))
 

	
 
        r_post = request.POST
 

	
 
        c.cs = self.__get_cs(revision)
 
        c.file = self.__get_filenode(c.cs, f_path)
 
@@ -487,50 +486,49 @@ class FilesController(BaseRepoController
 
                ext = ext_data[1]
 

	
 
        try:
 
            dbrepo = RepoModel().get_by_repo_name(repo_name)
 
            if not dbrepo.enable_downloads:
 
                return _('Downloads disabled') # TODO: do something else?
 

	
 
            if c.db_repo_scm_instance.alias == 'hg':
 
                # patch and reset hooks section of UI config to not run any
 
                # hooks on fetching archives with subrepos
 
                for k, v in c.db_repo_scm_instance._repo.ui.configitems('hooks'):
 
                    c.db_repo_scm_instance._repo.ui.setconfig('hooks', k, None)
 

	
 
            cs = c.db_repo_scm_instance.get_changeset(revision)
 
            content_type = settings.ARCHIVE_SPECS[fileformat][0]
 
        except ChangesetDoesNotExistError:
 
            return _('Unknown revision %s') % revision
 
        except EmptyRepositoryError:
 
            return _('Empty repository')
 
        except (ImproperArchiveTypeError, KeyError):
 
            return _('Unknown archive type')
 

	
 
        from kallithea import CONFIG
 
        rev_name = cs.raw_id[:12]
 
        archive_name = '%s-%s%s' % (safe_str(repo_name.replace('/', '_')),
 
                                    safe_str(rev_name), ext)
 
        archive_name = '%s-%s%s' % (repo_name.replace('/', '_'), rev_name, ext)
 

	
 
        archive_path = None
 
        cached_archive_path = None
 
        archive_cache_dir = CONFIG.get('archive_cache_dir')
 
        if archive_cache_dir and not subrepos: # TODO: subrepo caching?
 
            if not os.path.isdir(archive_cache_dir):
 
                os.makedirs(archive_cache_dir)
 
            cached_archive_path = os.path.join(archive_cache_dir, archive_name)
 
            if os.path.isfile(cached_archive_path):
 
                log.debug('Found cached archive in %s', cached_archive_path)
 
                archive_path = cached_archive_path
 
            else:
 
                log.debug('Archive %s is not yet cached', archive_name)
 

	
 
        if archive_path is None:
 
            # generate new archive
 
            fd, archive_path = tempfile.mkstemp()
 
            log.debug('Creating new temp archive in %s', archive_path)
 
            with os.fdopen(fd, 'wb') as stream:
 
                cs.fill_archive(stream=stream, kind=fileformat, subrepos=subrepos)
 
                # stream (and thus fd) has been closed by cs.fill_archive
 
            if cached_archive_path is not None:
 
                # we generated the archive - move it to cache
 
                log.debug('Storing new archive in %s', cached_archive_path)
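Note: the archive naming and cache lookup above come down to a couple of path operations. A hedged sketch of that part alone (directory, repository and revision values are placeholders):

    import os

    def cached_archive(archive_cache_dir, repo_name, rev_name, ext):
        """Build the archive file name and check whether a cached copy already exists."""
        archive_name = '%s-%s%s' % (repo_name.replace('/', '_'), rev_name, ext)
        if not archive_cache_dir:
            return archive_name, None  # caching disabled
        if not os.path.isdir(archive_cache_dir):
            os.makedirs(archive_cache_dir)
        path = os.path.join(archive_cache_dir, archive_name)
        return archive_name, (path if os.path.isfile(path) else None)

    print(cached_archive('/tmp/archive_cache', 'group/repo', '0a1b2c3d4e5f', '.tar.gz'))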
kallithea/controllers/login.py
 
@@ -20,76 +20,75 @@ Login controller for Kallithea
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 22, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 

	
 
import logging
 
import re
 

	
 
import formencode
 
from formencode import htmlfill
 
from tg import request, session
 
from tg import tmpl_context as c
 
from tg.i18n import ugettext as _
 
from webob.exc import HTTPBadRequest, HTTPFound
 

	
 
import kallithea.lib.helpers as h
 
from kallithea.config.routing import url
 
from kallithea.lib.auth import AuthUser, HasPermissionAnyDecorator
 
from kallithea.lib.base import BaseController, log_in_user, render
 
from kallithea.lib.exceptions import UserCreationError
 
from kallithea.lib.utils2 import safe_str
 
from kallithea.model.db import Setting, User
 
from kallithea.model.forms import LoginForm, PasswordResetConfirmationForm, PasswordResetRequestForm, RegisterForm
 
from kallithea.model.meta import Session
 
from kallithea.model.user import UserModel
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class LoginController(BaseController):
 

	
 
    def _validate_came_from(self, came_from,
 
            _re=re.compile(r"/(?!/)[-!#$%&'()*+,./:;=?@_~0-9A-Za-z]*$")):
 
        """Return True if came_from is valid and can and should be used.
 

	
 
        Determines if a URI reference is valid and relative to the origin;
 
        or in RFC 3986 terms, whether it matches this production:
 

	
 
          origin-relative-ref = path-absolute [ "?" query ] [ "#" fragment ]
 

	
 
        with the exception that '%' escapes are not validated and '#' is
 
        allowed inside the fragment part.
 
        """
 
        return _re.match(came_from) is not None
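Note: the regex only admits server-relative targets, which blocks open redirects via absolute or protocol-relative URLs. A quick sketch of the check with a few representative inputs (the pattern is copied from above, the test values are mine):

    import re

    CAME_FROM_RE = re.compile(r"/(?!/)[-!#$%&'()*+,./:;=?@_~0-9A-Za-z]*$")

    for candidate in ('/repo/changelog?page=2',    # accepted: origin-relative path + query
                      '/',                         # accepted: bare root
                      '//evil.example.com/',       # rejected: protocol-relative
                      'https://evil.example.com'): # rejected: absolute URL
        print(candidate, bool(CAME_FROM_RE.match(candidate)))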
 

	
 
    def index(self):
 
        c.came_from = safe_str(request.GET.get('came_from', ''))
 
        c.came_from = request.GET.get('came_from', '')
 
        if c.came_from:
 
            if not self._validate_came_from(c.came_from):
 
                log.error('Invalid came_from (not server-relative): %r', c.came_from)
 
                raise HTTPBadRequest()
 
        else:
 
            c.came_from = url('home')
 

	
 
        if request.POST:
 
            # import Login Form validator class
 
            login_form = LoginForm()()
 
            try:
 
                c.form_result = login_form.to_python(dict(request.POST))
 
                # form checks for username/password, now we're authenticated
 
                username = c.form_result['username']
 
                user = User.get_by_username_or_email(username, case_insensitive=True)
 
            except formencode.Invalid as errors:
 
                defaults = errors.value
 
                # remove password from filling in form again
 
                defaults.pop('password', None)
 
                return htmlfill.render(
 
                    render('/login.html'),
 
                    defaults=errors.value,
 
                    errors=errors.error_dict or {},
 
                    prefix_error=False,
kallithea/controllers/pullrequests.py
 
@@ -22,93 +22,87 @@ Original author and date, and relevant c
 
:created_on: May 7, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 
import traceback
 

	
 
import formencode
 
import mercurial.unionrepo
 
from tg import request
 
from tg import tmpl_context as c
 
from tg.i18n import ugettext as _
 
from webob.exc import HTTPBadRequest, HTTPForbidden, HTTPFound, HTTPNotFound
 

	
 
from kallithea.config.routing import url
 
from kallithea.controllers.changeset import _context_url, _ignorews_url, create_cs_pr_comment, delete_cs_pr_comment
 
from kallithea.lib import diffs
 
from kallithea.lib import helpers as h
 
from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
 
from kallithea.lib.base import BaseRepoController, jsonify, render
 
from kallithea.lib.graphmod import graph_data
 
from kallithea.lib.page import Page
 
from kallithea.lib.utils2 import ascii_bytes, safe_bytes, safe_int, safe_str
 
from kallithea.lib.utils2 import ascii_bytes, safe_bytes, safe_int
 
from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, EmptyRepositoryError
 
from kallithea.model.changeset_status import ChangesetStatusModel
 
from kallithea.model.comment import ChangesetCommentsModel
 
from kallithea.model.db import ChangesetStatus, PullRequest, PullRequestReviewer, Repository, User
 
from kallithea.model.forms import PullRequestForm, PullRequestPostForm
 
from kallithea.model.meta import Session
 
from kallithea.model.pull_request import CreatePullRequestAction, CreatePullRequestIterationAction, PullRequestModel
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def _get_reviewer(user_id):
 
    """Look up user by ID and validate it as a potential reviewer."""
 
    try:
 
        user = User.get(int(user_id))
 
    except ValueError:
 
        user = None
 

	
 
    if user is None or user.is_default_user:
 
        h.flash(_('Invalid reviewer "%s" specified') % user_id, category='error')
 
        raise HTTPBadRequest()
 

	
 
    return user
 

	
 

	
 
class PullrequestsController(BaseRepoController):
 

	
 
    def _get_repo_refs(self, repo, rev=None, branch=None, branch_rev=None):
 
        """return a structure with repo's interesting changesets, suitable for
 
        the selectors in pullrequest.html
 

	
 
        rev: a revision that must be in the list somehow and selected by default
 
        branch: a branch that must be in the list and selected by default - even if closed
 
        branch_rev: a revision of which peers should be preferred and available."""
 
        # list named branches that have been merged to this named branch - it should probably merge back
 
        peers = []
 

	
 
        if rev:
 
            rev = safe_str(rev)
 

	
 
        if branch:
 
            branch = safe_str(branch)
 

	
 
        if branch_rev:
 
            # a revset not restricting to merge() would be better
 
            # (especially because it would get the branch point)
 
            # ... but is currently too expensive
 
            # including branches of children could be nice too
 
            peerbranches = set()
 
            for i in repo._repo.revs(
 
                b"sort(parents(branch(id(%s)) and merge()) - branch(id(%s)), -rev)",
 
                ascii_bytes(branch_rev), ascii_bytes(branch_rev),
 
            ):
 
                for abranch in repo.get_changeset(i).branches:
 
                    if abranch not in peerbranches:
 
                        n = 'branch:%s:%s' % (abranch, repo.get_changeset(abranch).raw_id)
 
                        peers.append((n, abranch))
 
                        peerbranches.add(abranch)
 

	
 
        selected = None
 
        tiprev = repo.tags.get('tip')
 
        tipbranch = None
 

	
 
        branches = []
 
        for abranch, branchrev in repo.branches.items():
 
            n = 'branch:%s:%s' % (abranch, branchrev)
 
            desc = abranch
 
@@ -565,49 +559,49 @@ class PullrequestsController(BaseRepoCon
 
                c.cs_repo.scm_instance.get_changeset(c.cs_rev) # check it exists - raise ChangesetDoesNotExistError if not
 
                c.update_msg = _("Git pull requests don't support iterating yet.")
 
        except ChangesetDoesNotExistError:
 
            c.update_msg = _('Error: some changesets not found when displaying pull request from %s.') % c.cs_rev
 

	
 
        c.avail_revs = avail_revs
 
        c.avail_cs = [org_scm_instance.get_changeset(r) for r in avail_show]
 
        c.avail_jsdata = graph_data(org_scm_instance, avail_show)
 

	
 
        raw_ids = [x.raw_id for x in c.cs_ranges]
 
        c.cs_comments = c.cs_repo.get_comments(raw_ids)
 
        c.cs_statuses = c.cs_repo.statuses(raw_ids)
 

	
 
        ignore_whitespace = request.GET.get('ignorews') == '1'
 
        line_context = safe_int(request.GET.get('context'), 3)
 
        c.ignorews_url = _ignorews_url
 
        c.context_url = _context_url
 
        fulldiff = request.GET.get('fulldiff')
 
        diff_limit = None if fulldiff else self.cut_off_limit
 

	
 
        # we swap org/other ref since we run a simple diff on one repo
 
        log.debug('running diff between %s and %s in %s',
 
                  c.a_rev, c.cs_rev, org_scm_instance.path)
 
        try:
 
            raw_diff = diffs.get_diff(org_scm_instance, rev1=safe_str(c.a_rev), rev2=safe_str(c.cs_rev),
 
            raw_diff = diffs.get_diff(org_scm_instance, rev1=c.a_rev, rev2=c.cs_rev,
 
                                      ignore_whitespace=ignore_whitespace, context=line_context)
 
        except ChangesetDoesNotExistError:
 
            raw_diff = safe_bytes(_("The diff can't be shown - the PR revisions could not be found."))
 
        diff_processor = diffs.DiffProcessor(raw_diff, diff_limit=diff_limit)
 
        c.limited_diff = diff_processor.limited_diff
 
        c.file_diff_data = []
 
        c.lines_added = 0
 
        c.lines_deleted = 0
 

	
 
        for f in diff_processor.parsed:
 
            st = f['stats']
 
            c.lines_added += st['added']
 
            c.lines_deleted += st['deleted']
 
            filename = f['filename']
 
            fid = h.FID('', filename)
 
            html_diff = diffs.as_html(enable_comments=True, parsed_lines=[f])
 
            c.file_diff_data.append((fid, None, f['operation'], f['old_filename'], filename, html_diff, st))
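Note: this loop only aggregates per-file statistics from DiffProcessor into page totals. A reduced sketch of the accumulation over already-parsed entries (the dict shape mirrors the f['stats'] and f['filename'] fields used above; the sample data is made up):

    # sample of what diff_processor.parsed yields: one entry per changed file
    parsed = [
        {'filename': 'setup.py', 'operation': 'M', 'stats': {'added': 3, 'deleted': 1}},
        {'filename': 'README.rst', 'operation': 'A', 'stats': {'added': 40, 'deleted': 0}},
    ]

    lines_added = lines_deleted = 0
    file_diff_data = []
    for f in parsed:
        st = f['stats']
        lines_added += st['added']
        lines_deleted += st['deleted']
        file_diff_data.append((f['filename'], f['operation'], st))

    print(lines_added, lines_deleted)  # 43 1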
 

	
 
        # inline comments
 
        c.inline_cnt = 0
 
        c.inline_comments = cc_model.get_inline_comments(
 
                                c.db_repo.repo_id,
 
                                pull_request=pull_request_id)
 
        # count inline comments
kallithea/controllers/search.py
 
@@ -18,49 +18,49 @@ kallithea.controllers.search
 
Search controller for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Aug 7, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 
import traceback
 

	
 
from tg import config, request
 
from tg import tmpl_context as c
 
from tg.i18n import ugettext as _
 
from whoosh.index import EmptyIndexError, exists_in, open_dir
 
from whoosh.qparser import QueryParser, QueryParserError
 
from whoosh.query import Phrase, Prefix
 

	
 
from kallithea.lib.auth import LoginRequired
 
from kallithea.lib.base import BaseRepoController, render
 
from kallithea.lib.indexers import CHGSET_IDX_NAME, CHGSETS_SCHEMA, IDX_NAME, SCHEMA, WhooshResultWrapper
 
from kallithea.lib.page import Page
 
from kallithea.lib.utils2 import safe_int, safe_str
 
from kallithea.lib.utils2 import safe_int
 
from kallithea.model.repo import RepoModel
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class SearchController(BaseRepoController):
 

	
 
    @LoginRequired(allow_default_user=True)
 
    def index(self, repo_name=None):
 
        c.repo_name = repo_name
 
        c.formated_results = []
 
        c.runtime = ''
 
        c.cur_query = request.GET.get('q', None)
 
        c.cur_type = request.GET.get('type', 'content')
 
        c.cur_search = search_type = {'content': 'content',
 
                                      'commit': 'message',
 
                                      'path': 'path',
 
                                      'repository': 'repository'
 
                                      }.get(c.cur_type, 'content')
 

	
 
        index_name = {
 
            'content': IDX_NAME,
 
            'commit': CHGSET_IDX_NAME,
 
@@ -103,40 +103,40 @@ class SearchController(BaseRepoControlle
 
                    elif isinstance(query, Prefix):
 
                        highlight_items.add(query.text)
 
                    else:
 
                        for i in query.all_terms():
 
                            if i[0] in ['content', 'message']:
 
                                highlight_items.add(i[1])
 

	
 
                    matcher = query.matcher(searcher)
 

	
 
                    log.debug('query: %s', query)
 
                    log.debug('hl terms: %s', highlight_items)
 
                    results = searcher.search(query)
 
                    res_ln = len(results)
 
                    c.runtime = '%s results (%.3f seconds)' % (
 
                        res_ln, results.runtime
 
                    )
 

	
 
                    repo_location = RepoModel().repos_path
 
                    c.formated_results = Page(
 
                        WhooshResultWrapper(search_type, searcher, matcher,
 
                                            highlight_items, repo_location),
 
                        page=p,
 
                        item_count=res_ln,
 
                        items_per_page=10,
 
                        type=safe_str(c.cur_type),
 
                        q=safe_str(c.cur_query),
 
                        type=c.cur_type,
 
                        q=c.cur_query,
 
                    )
 

	
 
                except QueryParserError:
 
                    c.runtime = _('Invalid search query. Try quoting it.')
 
                searcher.close()
 
            except EmptyIndexError:
 
                log.error("Empty search index - run 'kallithea-cli index-create' regularly")
 
                c.runtime = _('The server has no search index.')
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                c.runtime = _('An error occurred during search operation.')
 

	
 
        # Return a rendered template
 
        return render('/search/search.html')
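Note: the controller goes through the usual Whoosh steps: open the index, parse the query against a field, search, then paginate and highlight. A minimal standalone sketch of that flow, assuming an existing index directory (the path, query text and field name are placeholders):

    from whoosh.index import open_dir
    from whoosh.qparser import QueryParser

    ix = open_dir('/srv/kallithea/data/index')  # placeholder index location
    with ix.searcher() as searcher:
        parser = QueryParser('content', schema=ix.schema)
        query = parser.parse('def repo_scan')
        results = searcher.search(query, limit=10)
        print('%s results (%.3f seconds)' % (len(results), results.runtime))
        for hit in results:
            print(hit.fields())  # stored fields of each matching document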
kallithea/lib/auth_modules/auth_container.py
 
@@ -8,49 +8,49 @@
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.auth_modules.auth_container
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Kallithea container based authentication plugin
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Created on Nov 17, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 

	
 
from kallithea.lib import auth_modules
 
from kallithea.lib.compat import hybrid_property
 
from kallithea.lib.utils2 import safe_str, str2bool
 
from kallithea.lib.utils2 import str2bool
 
from kallithea.model.db import Setting
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class KallitheaAuthPlugin(auth_modules.KallitheaExternalAuthPlugin):
 
    def __init__(self):
 
        pass
 

	
 
    @hybrid_property
 
    def name(self):
 
        return "container"
 

	
 
    @hybrid_property
 
    def is_container_auth(self):
 
        return True
 

	
 
    def settings(self):
 

	
 
        settings = [
 
            {
 
                "name": "header",
 
                "validator": self.validators.UnicodeString(strip=True, not_empty=True),
 
@@ -159,49 +159,49 @@ class KallitheaAuthPlugin(auth_modules.K
 
        is set. clean_username extracts the username from this data if it

        contains an '@'.
 
        Return None on failure. On success, return a dictionary of the form:
 

	
 
            see: KallitheaAuthPluginBase.auth_func_attrs
 

	
 
        :param userobj:
 
        :param username:
 
        :param password:
 
        :param settings:
 
        :param kwargs:
 
        """
 
        environ = kwargs.get('environ')
 
        if not environ:
 
            log.debug('Empty environ data skipping...')
 
            return None
 

	
 
        if not userobj:
 
            userobj = self.get_user('', environ=environ, settings=settings)
 

	
 
        # we don't care about the passed username/password for container auth plugins.
 
        # only way to log in is using environ
 
        username = None
 
        if userobj:
 
            username = safe_str(getattr(userobj, 'username'))
 
            username = getattr(userobj, 'username')
 

	
 
        if not username:
 
            # we don't have any objects in DB, user doesn't exist, extract
 
            # username from environ based on the settings
 
            username = self._get_username(environ, settings)
 

	
 
        # if we cannot fetch a username, it's a no-go for this plugin to proceed
 
        if not username:
 
            return None
 

	
 
        # old attrs fetched from Kallithea database
 
        admin = getattr(userobj, 'admin', False)
 
        email = environ.get(settings.get('email_header'), getattr(userobj, 'email', ''))
 
        firstname = environ.get(settings.get('firstname_header'), getattr(userobj, 'firstname', ''))
 
        lastname = environ.get(settings.get('lastname_header'), getattr(userobj, 'lastname', ''))
 

	
 
        user_data = {
 
            'username': username,
 
            'firstname': firstname or username,
 
            'lastname': lastname or '',
 
            'groups': [],
 
            'email': email or '',
 
            'admin': admin or False,
 
            'extern_name': username,
kallithea/lib/auth_modules/auth_ldap.py
 
@@ -10,92 +10,91 @@
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.auth_modules.auth_ldap
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Kallithea authentication plugin for LDAP
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Created on Nov 17, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 

	
 
import logging
 

	
 
from kallithea.lib import auth_modules
 
from kallithea.lib.compat import hybrid_property
 
from kallithea.lib.exceptions import LdapConnectionError, LdapImportError, LdapPasswordError, LdapUsernameError
 
from kallithea.lib.utils2 import safe_str
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 
try:
 
    import ldap
 
    import ldap.filter
 
except ImportError:
 
    # means that python-ldap is not installed
 
    ldap = None
 

	
 

	
 
class AuthLdap(object):
 

	
 
    def __init__(self, server, base_dn, port=None, bind_dn='', bind_pass='',
 
                 tls_kind='LDAPS', tls_reqcert='DEMAND', cacertdir=None, ldap_version=3,
 
                 ldap_filter='(&(objectClass=user)(!(objectClass=computer)))',
 
                 search_scope='SUBTREE', attr_login='uid'):
 
        if ldap is None:
 
            raise LdapImportError
 

	
 
        self.ldap_version = ldap_version
 

	
 
        self.TLS_KIND = tls_kind
 
        OPT_X_TLS_DEMAND = 2
 
        self.TLS_REQCERT = getattr(ldap, 'OPT_X_TLS_%s' % tls_reqcert,
 
                                   OPT_X_TLS_DEMAND)
 
        self.cacertdir = cacertdir
 

	
 
        protocol = 'ldaps' if self.TLS_KIND == 'LDAPS' else 'ldap'
 
        if not port:
 
            port = 636 if self.TLS_KIND == 'LDAPS' else 389
 
        self.LDAP_SERVER = str(', '.join(
 
            "%s://%s:%s" % (protocol,
 
                            host.strip(),
 
                            port)
 
            for host in server.split(',')))
 

	
 
        self.LDAP_BIND_DN = safe_str(bind_dn)
 
        self.LDAP_BIND_PASS = safe_str(bind_pass)
 
        self.LDAP_BIND_DN = bind_dn
 
        self.LDAP_BIND_PASS = bind_pass
 

	
 
        self.BASE_DN = safe_str(base_dn)
 
        self.LDAP_FILTER = safe_str(ldap_filter)
 
        self.BASE_DN = base_dn
 
        self.LDAP_FILTER = ldap_filter
 
        self.SEARCH_SCOPE = getattr(ldap, 'SCOPE_%s' % search_scope)
 
        self.attr_login = attr_login
 

	
 
    def authenticate_ldap(self, username, password):
 
        """
 
        Authenticate a user via LDAP and return his/her LDAP properties.
 

	
 
        Raises AuthenticationError if the credentials are rejected, or
 
        EnvironmentError if the LDAP server can't be reached.
 

	
 
        :param username: username
 
        :param password: password
 
        """
 

	
 
        if not password:
 
            log.debug("Attempt to authenticate LDAP user "
 
                      "with blank password rejected.")
 
            raise LdapPasswordError()
 
        if "," in username:
 
            raise LdapUsernameError("invalid character in username: ,")
 
        try:
 
            if self.cacertdir:
 
                if hasattr(ldap, 'OPT_X_TLS_CACERTDIR'):
 
                    ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, self.cacertdir)
 
@@ -118,49 +117,49 @@ class AuthLdap(object):
 
                server.start_tls_s()
 

	
 
            if self.LDAP_BIND_DN and self.LDAP_BIND_PASS:
 
                log.debug('Trying simple_bind with password and given DN: %s',
 
                          self.LDAP_BIND_DN)
 
                server.simple_bind_s(self.LDAP_BIND_DN, self.LDAP_BIND_PASS)
 

	
 
            filter_ = '(&%s(%s=%s))' % (self.LDAP_FILTER,
 
                                        ldap.filter.escape_filter_chars(self.attr_login),
 
                                        ldap.filter.escape_filter_chars(username))
 
            log.debug("Authenticating %r filter %s at %s", self.BASE_DN,
 
                      filter_, self.LDAP_SERVER)
 
            lobjects = server.search_ext_s(self.BASE_DN, self.SEARCH_SCOPE,
 
                                           filter_)
 

	
 
            if not lobjects:
 
                raise ldap.NO_SUCH_OBJECT()
 

	
 
            for (dn, _attrs) in lobjects:
 
                if dn is None:
 
                    continue
 

	
 
                try:
 
                    log.debug('Trying simple bind with %s', dn)
 
                    server.simple_bind_s(dn, safe_str(password))
 
                    server.simple_bind_s(dn, password)
 
                    results = server.search_ext_s(dn, ldap.SCOPE_BASE,
 
                                                  '(objectClass=*)')
 
                    if len(results) == 1:
 
                        dn_, attrs = results[0]
 
                        assert dn_ == dn
 
                        return dn, attrs
 

	
 
                except ldap.INVALID_CREDENTIALS:
 
                    log.debug("LDAP rejected password for user '%s': %s",
 
                              username, dn)
 
                    continue # accept authentication as another ldap user with same username
 

	
 
            log.debug("No matching LDAP objects for authentication "
 
                      "of '%s'", username)
 
            raise LdapPasswordError()
 

	
 
        except ldap.NO_SUCH_OBJECT:
 
            log.debug("LDAP says no such user '%s'", username)
 
            raise LdapUsernameError()
 
        except ldap.SERVER_DOWN:
 
            # [0] might be {'info': "TLS error -8179:Peer's Certificate issuer is not recognized.", 'desc': "Can't contact LDAP server"}
 
            raise LdapConnectionError("LDAP can't connect to authentication server")
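Note: the search filter is assembled with escape_filter_chars so that user input cannot break out of the LDAP filter syntax. A short sketch of that step alone (the filter template matches the default above; the username is a deliberately awkward example):

    import ldap.filter

    ldap_filter = '(&(objectClass=user)(!(objectClass=computer)))'
    attr_login = 'uid'
    username = 'jane.doe(*)'  # untrusted input

    filter_ = '(&%s(%s=%s))' % (ldap_filter,
                                ldap.filter.escape_filter_chars(attr_login),
                                ldap.filter.escape_filter_chars(username))
    print(filter_)
    # '(', ')' and '*' in the username come out escaped as \28, \29 and \2a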
 

	
 

	
kallithea/lib/base.py
 
@@ -28,49 +28,49 @@ Original author and date, and relevant c
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import base64
 
import datetime
 
import logging
 
import traceback
 
import warnings
 

	
 
import decorator
 
import paste.auth.basic
 
import paste.httpexceptions
 
import paste.httpheaders
 
import webob.exc
 
from tg import TGController, config, render_template, request, response, session
 
from tg import tmpl_context as c
 
from tg.i18n import ugettext as _
 

	
 
from kallithea import BACKENDS, __version__
 
from kallithea.config.routing import url
 
from kallithea.lib import auth_modules, ext_json
 
from kallithea.lib.auth import AuthUser, HasPermissionAnyMiddleware
 
from kallithea.lib.exceptions import UserCreationError
 
from kallithea.lib.utils import get_repo_slug, is_valid_repo
 
from kallithea.lib.utils2 import AttributeDict, ascii_bytes, safe_int, safe_str, safe_unicode, set_hook_environment, str2bool
 
from kallithea.lib.utils2 import AttributeDict, ascii_bytes, safe_int, safe_unicode, set_hook_environment, str2bool
 
from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError
 
from kallithea.model import meta
 
from kallithea.model.db import PullRequest, Repository, Setting, User
 
from kallithea.model.scm import ScmModel
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def render(template_path):
 
    return render_template({'url': url}, 'mako', template_path)
 

	
 

	
 
def _filter_proxy(ip):
 
    """
 
    HEADERS can contain multiple IPs: the left-most is the original

    client, and each successive proxy that passed the request appends the IP

    address from which it received the request.
 

	
 
    :param ip:
 
    """
 
    if ',' in ip:
 
        _ips = ip.split(',')
 
        _first_ip = _ips[0].strip()
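Note: a compact sketch of the proxy-header handling _filter_proxy starts here: keep only the left-most address, which belongs to the original client (the header value is an example):

    def client_ip(forwarded_for):
        """Return the original client IP from a comma-separated proxy header."""
        if ',' in forwarded_for:
            return forwarded_for.split(',')[0].strip()
        return forwarded_for.strip()

    print(client_ip('203.0.113.7, 10.0.0.2, 10.0.0.1'))  # 203.0.113.7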
 
@@ -221,49 +221,49 @@ class BaseVCSController(object):
 
        default_user = User.get_default_user(cache=True)
 
        default_authuser = AuthUser.make(dbuser=default_user, ip_addr=ip_addr)
 
        if default_authuser is None:
 
            log.debug('No anonymous access at all') # move on to proper user auth
 
        else:
 
            if self._check_permission(action, default_authuser, repo_name):
 
                return default_authuser, None
 
            log.debug('Not authorized to access this repository as anonymous user')
 

	
 
        username = None
 
        #==============================================================
 
        # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
 
        # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
 
        #==============================================================
 

	
 
        # try to auth based on environ, container auth methods
 
        log.debug('Running PRE-AUTH for container based authentication')
 
        pre_auth = auth_modules.authenticate('', '', environ)
 
        if pre_auth is not None and pre_auth.get('username'):
 
            username = pre_auth['username']
 
        log.debug('PRE-AUTH got %s as username', username)
 

	
 
        # If not authenticated by the container, running basic auth
 
        if not username:
 
            self.authenticate.realm = safe_str(self.config['realm'])
 
            self.authenticate.realm = self.config['realm']
 
            result = self.authenticate(environ)
 
            if isinstance(result, str):
 
                paste.httpheaders.AUTH_TYPE.update(environ, 'basic')
 
                paste.httpheaders.REMOTE_USER.update(environ, result)
 
                username = result
 
            else:
 
                return None, result.wsgi_application
 

	
 
        #==============================================================
 
        # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
 
        #==============================================================
 
        try:
 
            user = User.get_by_username_or_email(username)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            return None, webob.exc.HTTPInternalServerError()
 

	
 
        authuser = AuthUser.make(dbuser=user, ip_addr=ip_addr)
 
        if authuser is None:
 
            return None, webob.exc.HTTPForbidden()
 
        if not self._check_permission(action, authuser, repo_name):
 
            return None, webob.exc.HTTPForbidden()
 

	
 
        return user, None
 
@@ -312,49 +312,49 @@ class BaseVCSController(object):
 
            # quick check if repo exists...
 
            if not is_valid_repo(parsed_request.repo_name, self.basepath, self.scm_alias):
 
                raise webob.exc.HTTPNotFound()
 

	
 
            if parsed_request.action is None:
 
                # Note: the client doesn't get the helpful error message
 
                raise webob.exc.HTTPBadRequest('Unable to detect pull/push action for %r! Are you using a nonstandard command or client?' % parsed_request.repo_name)
 

	
 
            #======================================================================
 
            # CHECK PERMISSIONS
 
            #======================================================================
 
            ip_addr = self._get_ip_addr(environ)
 
            user, response_app = self._authorize(environ, parsed_request.action, parsed_request.repo_name, ip_addr)
 
            if response_app is not None:
 
                return response_app(environ, start_response)
 

	
 
            #======================================================================
 
            # REQUEST HANDLING
 
            #======================================================================
 
            set_hook_environment(user.username, ip_addr,
 
                parsed_request.repo_name, self.scm_alias, parsed_request.action)
 

	
 
            try:
 
                log.info('%s action on %s repo "%s" by "%s" from %s',
 
                         parsed_request.action, self.scm_alias, parsed_request.repo_name, safe_str(user.username), ip_addr)
 
                         parsed_request.action, self.scm_alias, parsed_request.repo_name, user.username, ip_addr)
 
                app = self._make_app(parsed_request)
 
                return app(environ, start_response)
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                raise webob.exc.HTTPInternalServerError()
 

	
 
        except webob.exc.HTTPException as e:
 
            return e(environ, start_response)
 

	
 

	
 
class BaseController(TGController):
 

	
 
    def _before(self, *args, **kwargs):
 
        """
 
        _before is called before controller methods and after __call__
 
        """
 
        if request.needs_csrf_check:
 
            # CSRF protection: Whenever a request has ambient authority (whether
 
            # through a session cookie or its origin IP address), it must include
 
            # the correct token, unless the HTTP method is GET or HEAD (and thus
 
            # guaranteed to be side effect free). In practice, the only situation
 
            # where we allow side effects without ambient authority is when the
 
            # authority comes from an API key; and that is handled above.
 
            from kallithea.lib import helpers as h
kallithea/lib/caching_query.py
 
@@ -3,50 +3,48 @@
 
"""caching_query.py
 

	
 
Represent persistence structures which allow the usage of
 
Beaker caching with SQLAlchemy.
 

	
 
The three new concepts introduced here are:
 

	
 
 * CachingQuery - a Query subclass that caches and
 
   retrieves results in/from Beaker.
 
 * FromCache - a query option that establishes caching
 
   parameters on a Query
 
 * _params_from_query - extracts value parameters from
 
   a Query.
 

	
 
The rest of what's here are standard SQLAlchemy and
 
Beaker constructs.
 

	
 
"""
 
import beaker
 
from beaker.exceptions import BeakerException
 
from sqlalchemy.orm.interfaces import MapperOption
 
from sqlalchemy.orm.query import Query
 
from sqlalchemy.sql import visitors
 

	
 
from kallithea.lib.utils2 import safe_str
 

	
 

	
 
class CachingQuery(Query):
 
    """A Query subclass which optionally loads full results from a Beaker
 
    cache region.
 

	
 
    The CachingQuery stores additional state that allows it to consult
 
    a Beaker cache before accessing the database:
 

	
 
    * A "region", which is a cache region argument passed to a
 
      Beaker CacheManager, specifies a particular cache configuration
 
      (including backend implementation, expiration times, etc.)
 
    * A "namespace", which is a qualifying name that identifies a
 
      group of keys within the cache.  A query that filters on a name
 
      might use the name "by_name", a query that filters on a date range
 
      to a joined table might use the name "related_date_range".
 

	
 
    When the above state is present, a Beaker cache is retrieved.
 

	
 
    The "namespace" name is first concatenated with
 
    a string composed of the individual entities and columns the Query
 
    requests, i.e. such as ``Query(User.id, User.name)``.
 

	
 
    The Beaker cache is then loaded from the cache manager based
 
    on the region and composed namespace.  The key within the cache
 
@@ -154,49 +152,49 @@ def _get_cache_parameters(query):
 
    # cache_key= str(uuid.uuid5(uuid.NAMESPACE_DNS, cache_key))
 

	
 
    return cache, cache_key
 

	
 

	
 
def _namespace_from_query(namespace, query):
 
    # cache namespace - the token handed in by the
 
    # option + class we're querying against
 
    namespace = " ".join([namespace] + [str(x) for x in query._entities])
 

	
 
    # memcached wants this
 
    namespace = namespace.replace(' ', '_')
 

	
 
    return namespace
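Note: the namespace is just a readable string derived from the query's entities, with spaces replaced because memcached does not accept them in keys. A small sketch of the same derivation with plain strings standing in for query._entities:

    def namespace_from_entities(namespace, entities):
        """Build a cache namespace the way _namespace_from_query does."""
        namespace = ' '.join([namespace] + [str(e) for e in entities])
        return namespace.replace(' ', '_')  # memcached keys cannot contain spaces

    print(namespace_from_entities('by_name', ['User.user_id', 'User.username']))
    # by_name_User.user_id_User.username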
 

	
 

	
 
def _set_cache_parameters(query, region, namespace, cache_key):
 

	
 
    if hasattr(query, '_cache_parameters'):
 
        region, namespace, cache_key = query._cache_parameters
 
        raise ValueError("This query is already configured "
 
                        "for region %r namespace %r" %
 
                        (region, namespace)
 
                    )
 
    query._cache_parameters = region, safe_str(namespace), cache_key
 
    query._cache_parameters = region, namespace, cache_key
 

	
 

	
 
class FromCache(MapperOption):
 
    """Specifies that a Query should load results from a cache."""
 

	
 
    propagate_to_loaders = False
 

	
 
    def __init__(self, region, namespace, cache_key=None):
 
        """Construct a new FromCache.
 

	
 
        :param region: the cache region.  Should be a
 
        region configured in the Beaker CacheManager.
 

	
 
        :param namespace: the cache namespace.  Should
 
        be a name uniquely describing the target Query's
 
        lexical structure.
 

	
 
        :param cache_key: optional.  A string cache key
 
        that will serve as the key to the query.   Use this
 
        if your query has a huge amount of parameters (such
 
        as when using in_()) which correspond more simply to
 
        some other identifier.
 

	
 
        """
kallithea/lib/hooks.py
 
@@ -13,59 +13,59 @@
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.hooks
 
~~~~~~~~~~~~~~~~~~~
 

	
 
Hooks run by Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Aug 6, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import os
 
import sys
 
import time
 

	
 
import mercurial.scmutil
 

	
 
from kallithea.lib import helpers as h
 
from kallithea.lib.exceptions import UserCreationError
 
from kallithea.lib.utils import action_logger, make_ui
 
from kallithea.lib.utils2 import HookEnvironmentError, ascii_str, get_hook_environment, safe_bytes, safe_str
 
from kallithea.lib.utils2 import HookEnvironmentError, ascii_str, get_hook_environment, safe_bytes
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 
from kallithea.model.db import Repository, User
 

	
 

	
 
def _get_scm_size(alias, root_path):
 
    if not alias.startswith('.'):
 
        alias += '.'
 

	
 
    size_scm, size_root = 0, 0
 
    for path, dirs, files in os.walk(safe_str(root_path)):
 
    for path, dirs, files in os.walk(root_path):
 
        if path.find(alias) != -1:
 
            for f in files:
 
                try:
 
                    size_scm += os.path.getsize(os.path.join(path, f))
 
                except OSError:
 
                    pass
 
        else:
 
            for f in files:
 
                try:
 
                    size_root += os.path.getsize(os.path.join(path, f))
 
                except OSError:
 
                    pass
 

	
 
    size_scm_f = h.format_byte_size(size_scm)
 
    size_root_f = h.format_byte_size(size_root)
 
    size_total_f = h.format_byte_size(size_root + size_scm)
 

	
 
    return size_scm_f, size_root_f, size_total_f
 

	
 

	
 
def repo_size(ui, repo, hooktype=None, **kwargs):
 
    """Show size of Mercurial repository, to be called after push."""
 
    size_hg_f, size_root_f, size_total_f = _get_scm_size('.hg', repo.root)
 

	
 
@@ -297,50 +297,49 @@ def _hook_environment(repo_path):
 
    """
 
    Create a light-weight environment for stand-alone scripts and return a UI and the
 
    db repository.
 

	
 
    Git hooks are executed as subprocess of Git while Kallithea is waiting, and
 
    they thus need enough info to be able to create an app environment and
 
    connect to the database.
 
    """
 
    import paste.deploy
 
    import kallithea.config.middleware
 

	
 
    extras = get_hook_environment()
 

	
 
    path_to_ini_file = extras['config']
 
    kallithea.CONFIG = paste.deploy.appconfig('config:' + path_to_ini_file)
 
    #logging.config.fileConfig(ini_file_path) # Note: we are in a different process - don't use configured logging
 
    kallithea.config.middleware.make_app(kallithea.CONFIG.global_conf, **kallithea.CONFIG.local_conf)
 

	
 
    # fix if it's not a bare repo
 
    if repo_path.endswith(os.sep + '.git'):
 
        repo_path = repo_path[:-5]
 

	
 
    repo = Repository.get_by_full_path(repo_path)
 
    if not repo:
 
        raise OSError('Repository %s not found in database'
 
                      % (safe_str(repo_path)))
 
        raise OSError('Repository %s not found in database' % repo_path)
 

	
 
    baseui = make_ui()
 
    return baseui, repo
 

	
 

	
 
def handle_git_pre_receive(repo_path, git_stdin_lines):
 
    """Called from Git pre-receive hook"""
 
    # Currently unused. TODO: remove?
 
    return 0
 

	
 

	
 
def handle_git_post_receive(repo_path, git_stdin_lines):
 
    """Called from Git post-receive hook"""
 
    try:
 
        baseui, repo = _hook_environment(repo_path)
 
    except HookEnvironmentError as e:
 
        sys.stderr.write("Skipping Kallithea Git post-receive hook %r.\nGit was apparently not invoked by Kallithea: %s\n" % (sys.argv[0], e))
 
        return 0
 

	
 
    # the post push hook should never use the cached instance
 
    scm_repo = repo.scm_instance_no_cache()
 

	
 
    rev_data = []
 
    for l in git_stdin_lines:
 
@@ -376,26 +375,26 @@ def handle_git_post_receive(repo_path, g
 
                stdout = scm_repo.run_git_command(cmd)
 
                git_revs += stdout.splitlines()
 

	
 
            elif push_ref['new_rev'] == EmptyChangeset().raw_id:
 
                # delete branch case
 
                git_revs += ['delete_branch=>%s' % push_ref['name']]
 
            else:
 
                cmd = ['log', '%(old_rev)s..%(new_rev)s' % push_ref,
 
                       '--reverse', '--pretty=format:%H']
 
                stdout = scm_repo.run_git_command(cmd)
 
                git_revs += stdout.splitlines()
 

	
 
        elif _type == 'tags':
 
            git_revs += ['tag=>%s' % push_ref['name']]
 

	
 
    process_pushed_raw_ids(git_revs)
 

	
 
    return 0
 

	
 

	
 
# Almost exactly like Mercurial contrib/hg-ssh:
 
def rejectpush(ui, **kwargs):
 
    """Mercurial hook to be installed as pretxnopen and prepushkey for read-only repos"""
 
    ex = get_hook_environment()
 
    ui.warn(safe_bytes("Push access to %r denied\n" % safe_str(ex.repository)))
 
    ui.warn(safe_bytes("Push access to %r denied\n" % ex.repository))
 
    return 1
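For context, the hgrc wiring that the docstring alludes to would look roughly like the sketch below. Kallithea installs its hooks itself, and the dotted hook names here are made up:

[hooks]
pretxnopen.kallithea_readonly = python:kallithea.lib.hooks.rejectpush
prepushkey.kallithea_readonly = python:kallithea.lib.hooks.rejectpush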
kallithea/lib/indexers/daemon.py
 
@@ -18,49 +18,49 @@ kallithea.lib.indexers.daemon
 
A daemon will read from task table and run tasks
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jan 26, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 

	
 
import logging
 
import os
 
import sys
 
import traceback
 
from os.path import dirname
 
from shutil import rmtree
 
from time import mktime
 

	
 
from whoosh.index import create_in, exists_in, open_dir
 
from whoosh.qparser import QueryParser
 

	
 
from kallithea.config.conf import INDEX_EXTENSIONS, INDEX_FILENAMES
 
from kallithea.lib.indexers import CHGSET_IDX_NAME, CHGSETS_SCHEMA, IDX_NAME, SCHEMA
 
from kallithea.lib.utils2 import safe_str, safe_unicode
 
from kallithea.lib.utils2 import safe_unicode
 
from kallithea.lib.vcs.exceptions import ChangesetError, NodeDoesNotExistError, RepositoryError
 
from kallithea.model.db import Repository
 
from kallithea.model.scm import ScmModel
 

	
 

	
 
# Add location of top level folder to sys.path
 
project_path = dirname(dirname(dirname(dirname(os.path.realpath(__file__)))))
 
sys.path.append(project_path)
 

	
 

	
 

	
 

	
 
log = logging.getLogger('whoosh_indexer')
 

	
 

	
 
class WhooshIndexingDaemon(object):
 
    """
 
    Daemon for atomic indexing jobs
 
    """
 

	
 
    def __init__(self, indexname=IDX_NAME, index_location=None,
 
                 repo_location=None, repo_list=None,
 
                 repo_update_list=None):
 
        self.indexname = indexname
 
@@ -111,70 +111,67 @@ class WhooshIndexingDaemon(object):
 
    def _get_index_revision(self, repo):
 
        db_repo = Repository.get_by_repo_name(repo.name)
 
        landing_rev = 'tip'
 
        if db_repo:
 
            _rev_type, _rev = db_repo.landing_rev
 
            landing_rev = _rev
 
        return landing_rev
 

	
 
    def _get_index_changeset(self, repo, index_rev=None):
 
        if not index_rev:
 
            index_rev = self._get_index_revision(repo)
 
        cs = repo.get_changeset(index_rev)
 
        return cs
 

	
 
    def get_paths(self, repo):
 
        """
 
        Recursively walk the root dir and return a set of all paths in that dir,
 
        based on the repository walk function
 
        """
 
        index_paths_ = set()
 
        try:
 
            cs = self._get_index_changeset(repo)
 
            for _topnode, _dirs, files in cs.walk('/'):
 
                for f in files:
 
                    index_paths_.add(os.path.join(safe_str(repo.path), safe_str(f.path)))
 
                    index_paths_.add(os.path.join(repo.path, f.path))
 

	
 
        except RepositoryError:
 
            log.debug(traceback.format_exc())
 
            pass
 
        return index_paths_
 

	
 
    def get_node(self, repo, path, index_rev=None):
 
        """
 
        gets a filenode based on given full path. It operates on string for
 
        hg git compatibility.
 
        gets a filenode based on given full path.
 

	
 
        :param repo: scm repo instance
 
        :param path: full path including root location
 
        :return: FileNode
 
        """
 
        # FIXME: paths should be normalized ... or even better: don't include repo.path
 
        path = safe_str(path)
 
        repo_path = safe_str(repo.path)
 
        assert path.startswith(repo_path)
 
        assert path[len(repo_path)] in (os.path.sep, os.path.altsep)
 
        node_path = path[len(repo_path) + 1:]
 
        assert path.startswith(repo.path)
 
        assert path[len(repo.path)] in (os.path.sep, os.path.altsep)
 
        node_path = path[len(repo.path) + 1:]
 
        cs = self._get_index_changeset(repo, index_rev=index_rev)
 
        node = cs.get_node(node_path)
 
        return node
 

	
 
    def is_indexable_node(self, node):
 
        """
 
        Just index the content of chosen files, skipping binary files
 
        """
 
        return (node.extension in INDEX_EXTENSIONS or node.name in INDEX_FILENAMES) and \
 
               not node.is_binary
 

	
 
    def get_node_mtime(self, node):
 
        return mktime(node.last_changeset.date.timetuple())
 

	
 
    def add_doc(self, writer, path, repo, repo_name, index_rev=None):
 
        """
 
        Add a doc to the writer; this function itself fetches the data from
 
        the vcs backend instance
 
        """
 
        try:
 
            node = self.get_node(repo, path, index_rev)
 
        except (ChangesetError, NodeDoesNotExistError):
 
            log.debug("    >> %s - not found in %s %s", path, repo, index_rev)
 
            return 0, 0
kallithea/lib/middleware/simplehg.py
 
@@ -15,49 +15,49 @@
 
kallithea.lib.middleware.simplehg
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
SimpleHg middleware for handling Mercurial protocol requests (push/clone etc.).
 
It's implemented with a basic auth function.
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 28, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 

	
 
"""
 

	
 

	
 
import logging
 
import os
 
import urllib.parse
 

	
 
import mercurial.hgweb
 

	
 
from kallithea.lib.base import BaseVCSController, get_path_info
 
from kallithea.lib.utils import make_ui
 
from kallithea.lib.utils2 import safe_bytes, safe_str
 
from kallithea.lib.utils2 import safe_bytes
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def get_header_hgarg(environ):
 
    """Decode the special Mercurial encoding of big requests over multiple headers.
 
    >>> get_header_hgarg({})
 
    ''
 
    >>> get_header_hgarg({'HTTP_X_HGARG_0': ' ', 'HTTP_X_HGARG_1': 'a','HTTP_X_HGARG_2': '','HTTP_X_HGARG_3': 'b+c %20'})
 
    'ab+c %20'
 
    """
 
    chunks = []
 
    i = 1
 
    while True:
 
        v = environ.get('HTTP_X_HGARG_%d' % i)
 
        if v is None:
 
            break
 
        chunks.append(v)
 
        i += 1
 
    return ''.join(chunks)
 

	
 

	
 
cmd_mapping = {
 
@@ -116,34 +116,34 @@ class SimpleHg(BaseVCSController):
 
                    cmd = parts[1]
 
                    if cmd == 'batch':
 
                        hgarg = get_header_hgarg(environ)
 
                        if not hgarg.startswith('cmds='):
 
                            action = 'push' # paranoid and safe
 
                            break
 
                        action = 'pull'
 
                        for cmd_arg in hgarg[5:].split(';'):
 
                            cmd, _args = urllib.parse.unquote_plus(cmd_arg).split(' ', 1)
 
                            op = cmd_mapping.get(cmd, 'push')
 
                            if op != 'pull':
 
                                assert op == 'push'
 
                                action = 'push'
 
                                break
 
                    else:
 
                        action = cmd_mapping.get(cmd, 'push')
 
                    break # only process one cmd
 

	
 
        return parsed_request
 

	
 
    def _make_app(self, parsed_request):
 
        """
 
        Make an hgweb wsgi application.
 
        """
 
        str_repo_name = safe_str(parsed_request.repo_name)
 
        repo_path = os.path.join(safe_str(self.basepath), str_repo_name)
 
        repo_name = parsed_request.repo_name
 
        repo_path = os.path.join(self.basepath, repo_name)
 
        baseui = make_ui(repo_path=repo_path)
 
        hgweb_app = mercurial.hgweb.hgweb(safe_bytes(repo_path), name=str_repo_name, baseui=baseui)
 
        hgweb_app = mercurial.hgweb.hgweb(safe_bytes(repo_path), name=safe_bytes(repo_name), baseui=baseui)
 

	
 
        def wrapper_app(environ, start_response):
 
            environ['REPO_NAME'] = str_repo_name # used by mercurial.hgweb.hgweb
 
            environ['REPO_NAME'] = repo_name # used by mercurial.hgweb.hgweb
 
            return hgweb_app(environ, start_response)
 

	
 
        return wrapper_app
kallithea/lib/utils.py
 
@@ -19,49 +19,49 @@ Utilities library for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 18, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import datetime
 
import logging
 
import os
 
import re
 
import sys
 
import traceback
 
from distutils.version import StrictVersion
 

	
 
import beaker.cache
 
import mercurial.config
 
import mercurial.ui
 
from tg.i18n import ugettext as _
 

	
 
import kallithea.config.conf
 
from kallithea.lib.exceptions import HgsubversionImportError
 
from kallithea.lib.utils2 import ascii_bytes, aslist, get_current_authuser, safe_bytes, safe_str
 
from kallithea.lib.utils2 import ascii_bytes, aslist, get_current_authuser, safe_bytes
 
from kallithea.lib.vcs.backends.git.repository import GitRepository
 
from kallithea.lib.vcs.backends.hg.repository import MercurialRepository
 
from kallithea.lib.vcs.conf import settings
 
from kallithea.lib.vcs.exceptions import RepositoryError, VCSError
 
from kallithea.lib.vcs.utils.fakemod import create_module
 
from kallithea.lib.vcs.utils.helpers import get_scm
 
from kallithea.model import meta
 
from kallithea.model.db import RepoGroup, Repository, Setting, Ui, User, UserGroup, UserLog
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 
REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}_.*')
 

	
 

	
 
#==============================================================================
 
# PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
 
#==============================================================================
 
def get_repo_slug(request):
 
    _repo = request.environ['pylons.routes_dict'].get('repo_name')
 
    if _repo:
 
        _repo = _repo.rstrip('/')
 
    return _repo
 

	
 
@@ -153,49 +153,49 @@ def action_logger(user, action, repo, ip
 
    user_log.action = action
 

	
 
    user_log.repository = repo_obj
 
    user_log.repository_name = repo_name
 

	
 
    user_log.action_date = datetime.datetime.now()
 
    user_log.user_ip = ipaddr
 
    meta.Session().add(user_log)
 

	
 
    log.info('Logging action:%s on %s by user:%s ip:%s',
 
             action, repo, user_obj, ipaddr)
 
    if commit:
 
        meta.Session().commit()
 

	
 

	
 
def get_filesystem_repos(path):
 
    """
 
    Scans the given path for repos and returns (name, (type, path)) tuples
 

	
 
    :param path: path to scan for repositories
 
    :param recursive: recursive search and return names with subdirs in front
 
    """
 

	
 
    # remove ending slash for better results
 
    path = safe_str(path.rstrip(os.sep))
 
    path = path.rstrip(os.sep)
 
    log.debug('now scanning in %s', path)
 

	
 
    def isdir(*n):
 
        return os.path.isdir(os.path.join(*n))
 

	
 
    for root, dirs, _files in os.walk(path):
 
        recurse_dirs = []
 
        for subdir in dirs:
 
            # skip removed repos
 
            if REMOVED_REPO_PAT.match(subdir):
 
                continue
 

	
 
            # skip .<something> dirs TODO: rly? then we should prevent creating them ...
 
            if subdir.startswith('.'):
 
                continue
 

	
 
            cur_path = os.path.join(root, subdir)
 
            if isdir(cur_path, '.git'):
 
                log.warning('ignoring non-bare Git repo: %s', cur_path)
 
                continue
 

	
 
            if (isdir(cur_path, '.hg') or
 
                isdir(cur_path, '.svn') or
 
                isdir(cur_path, 'objects') and (isdir(cur_path, 'refs') or
 
@@ -248,67 +248,67 @@ def is_valid_repo_uri(repo_type, url, ui
 
            # initially check if it's at least the proper URL
 
            # or does it pass basic auth
 
            GitRepository._check_url(url)
 
        elif url.startswith('svn+http'):
 
            raise NotImplementedError()
 
        elif url.startswith('hg+http'):
 
            raise NotImplementedError()
 
        else:
 
            raise Exception('URI %s not allowed' % (url))
 

	
 

	
 
def is_valid_repo(repo_name, base_path, scm=None):
 
    """
 
    Returns True if the given path is a valid repository, False otherwise.
 
    If the scm param is given, also check that the repository's scm matches
 
    the given scm parameter
 

	
 
    :param repo_name:
 
    :param base_path:
 
    :param scm:
 

	
 
    :return True: if given path is a valid repository
 
    """
 
    # TODO: paranoid security checks?
 
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
 
    full_path = os.path.join(base_path, repo_name)
 

	
 
    try:
 
        scm_ = get_scm(full_path)
 
        if scm:
 
            return scm_[0] == scm
 
        return True
 
    except VCSError:
 
        return False
 

	
 

	
 
def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
 
    """
 
    Returns True if the given path is a repository group, False otherwise
 

	
 
    :param repo_name:
 
    :param base_path:
 
    """
 
    full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
 
    full_path = os.path.join(base_path, repo_group_name)
 

	
 
    # check if it's not a repo
 
    if is_valid_repo(repo_group_name, base_path):
 
        return False
 

	
 
    try:
 
        # we need to check bare git repos at higher level
 
        # since we might match branches/hooks/info/objects or possible
 
        # other things inside bare git repo
 
        get_scm(os.path.dirname(full_path))
 
        return False
 
    except VCSError:
 
        pass
 

	
 
    # check if it's a valid path
 
    if skip_path_check or os.path.isdir(full_path):
 
        return True
 

	
 
    return False
 

	
 

	
 
# propagated from mercurial documentation
 
ui_sections = ['alias', 'auth',
 
                'decode/encode', 'defaults',
kallithea/lib/utils2.py
 
@@ -308,49 +308,49 @@ def uri_filter(uri):
 
def credentials_filter(uri):
 
    """
 
    Returns a url with removed credentials
 

	
 
    :param uri:
 
    """
 

	
 
    uri = uri_filter(uri)
 
    # check if we have port
 
    if len(uri) > 2 and uri[2]:
 
        uri[2] = ':' + uri[2]
 

	
 
    return ''.join(uri)
 

	
 

	
 
def get_clone_url(clone_uri_tmpl, prefix_url, repo_name, repo_id, username=None):
 
    parsed_url = urlobject.URLObject(prefix_url)
 
    prefix = urllib.parse.unquote(parsed_url.path.rstrip('/'))
 
    try:
 
        system_user = pwd.getpwuid(os.getuid()).pw_name
 
    except Exception: # TODO: support all systems - especially Windows
 
        system_user = 'kallithea' # hardcoded default value ...
 
    args = {
 
        'scheme': parsed_url.scheme,
 
        'user': urllib.parse.quote(safe_str(username or '')),
 
        'user': urllib.parse.quote(username or ''),
 
        'netloc': parsed_url.netloc + prefix,  # like "hostname:port/prefix" (with optional ":port" and "/prefix")
 
        'prefix': prefix, # undocumented, empty or starting with /
 
        'repo': repo_name,
 
        'repoid': str(repo_id),
 
        'system_user': system_user,
 
        'hostname': parsed_url.hostname,
 
    }
 
    url = re.sub('{([^{}]+)}', lambda m: args.get(m.group(1), m.group(0)), clone_uri_tmpl)
 

	
 
    # remove leading @ sign if it's present. Case of empty user
 
    url_obj = urlobject.URLObject(url)
 
    if not url_obj.username:
 
        url_obj = url_obj.with_username(None)
 

	
 
    return str(url_obj)
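A small usage sketch of the template expansion above; the template, host and repository values are illustrative, not Kallithea defaults:

url = get_clone_url(
    clone_uri_tmpl='{scheme}://{user}@{netloc}/{repo}',  # illustrative template
    prefix_url='https://example.com/kallithea',          # made-up host and prefix
    repo_name='group/repo',
    repo_id=42,
    username='jane')
# -> 'https://jane@example.com/kallithea/group/repo'
# With username=None the user is empty and the leading '@' is dropped:
# -> 'https://example.com/kallithea/group/repo'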
 

	
 

	
 
def get_changeset_safe(repo, rev):
 
    """
 
    Safe version of get_changeset: if the changeset doesn't exist for a
 
    repo, it returns a dummy one instead
 

	
 
    :param repo:
 
    :param rev:
 
@@ -540,49 +540,49 @@ class Optional(object):
 
    def getval(self):
 
        """
 
        returns value from this Optional instance
 
        """
 
        if isinstance(self.type_, OAttr):
 
            # use params name
 
            return self.type_.attr_name
 
        return self.type_
 

	
 
    @classmethod
 
    def extract(cls, val):
 
        """
 
        Extracts value from Optional() instance
 

	
 
        :param val:
 
        :return: the original value if it's not an Optional instance, else
 
            the value of the instance
 
        """
 
        if isinstance(val, cls):
 
            return val.getval()
 
        return val
 

	
 

	
 
def urlreadable(s, _cleanstringsub=re.compile('[^-a-zA-Z0-9./]+').sub):
 
    return _cleanstringsub('_', safe_str(s)).rstrip('_')
 
    return _cleanstringsub('_', s).rstrip('_')
 

	
 

	
 
def recursive_replace(str_, replace=' '):
 
    """
 
    Recursively replace repeated occurrences of the given character with a single one
 

	
 
    :param str_: given string
 
    :param replace: char to find and replace multiple instances
 

	
 
    Examples::
 
    >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
 
    'Mighty-Mighty-Bo-sstones'
 
    """
 

	
 
    if str_.find(replace * 2) == -1:
 
        return str_
 
    else:
 
        str_ = str_.replace(replace * 2, replace)
 
        return recursive_replace(str_, replace)
 

	
 

	
 
def repo_name_slug(value):
 
    """
 
    Return slug of name of repository
kallithea/lib/vcs/backends/git/changeset.py
 
import re
 
from io import BytesIO
 
from itertools import chain
 
from subprocess import PIPE, Popen
 

	
 
from dulwich import objects
 
from dulwich.config import ConfigFile
 

	
 
from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
 
from kallithea.lib.vcs.conf import settings
 
from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, ImproperArchiveTypeError, NodeDoesNotExistError, RepositoryError, VCSError
 
from kallithea.lib.vcs.nodes import (
 
    AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode, NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode)
 
from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, date_fromtimestamp, safe_int, safe_str, safe_unicode
 
from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, date_fromtimestamp, safe_int, safe_unicode
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 

	
 

	
 
class GitChangeset(BaseChangeset):
 
    """
 
    Represents state of the repository at a revision.
 
    """
 

	
 
    def __init__(self, repository, revision):
 
        self._stat_modes = {}
 
        self.repository = repository
 
        revision = safe_str(revision)
 
        try:
 
            commit = self.repository._repo[ascii_bytes(revision)]
 
            if isinstance(commit, objects.Tag):
 
                revision = safe_str(commit.object[1])
 
                commit = self.repository._repo.get_object(commit.object[1])
 
        except KeyError:
 
            raise RepositoryError("Cannot get object with id %s" % revision)
 
        self.raw_id = ascii_str(commit.id)
 
        self.short_id = self.raw_id[:12]
 
        self._commit = commit  # a Dulwich Commit with .id
 
        self._tree_id = commit.tree
 
        self._committer_property = 'committer'
 
        self._author_property = 'author'
 
        self._date_property = 'commit_time'
 
        self._date_tz_property = 'commit_timezone'
 
        self.revision = repository.revisions.index(self.raw_id)
 

	
 
        self.nodes = {}
 
        self._paths = {}
 

	
 
    @LazyProperty
 
    def bookmarks(self):
 
        return ()
 

	
 
@@ -88,49 +87,48 @@ class GitChangeset(BaseChangeset):
 
    def branch(self):
 
        # Note: This function will return one branch name for the changeset -
 
        # that might not make sense in Git where branches() is a better match
 
        # for the basic model
 
        heads = self.repository._heads(reverse=False)
 
        ref = heads.get(self._commit.id)
 
        if ref:
 
            return safe_unicode(ref)
 

	
 
    @LazyProperty
 
    def branches(self):
 
        heads = self.repository._heads(reverse=True)
 
        return [b for b in heads if heads[b] == self._commit.id] # FIXME: Inefficient ... and returning None!
 

	
 
    def _fix_path(self, path):
 
        """
 
        Paths are stored without trailing slash so we need to get rid of it if
 
        needed.
 
        """
 
        if path.endswith('/'):
 
            path = path.rstrip('/')
 
        return path
 

	
 
    def _get_id_for_path(self, path):
 
        path = safe_str(path)
 
        # FIXME: Please, spare a couple of minutes and make those codes cleaner;
 
        if path not in self._paths:
 
            path = path.strip('/')
 
            # set root tree
 
            tree = self.repository._repo[self._tree_id]
 
            if path == '':
 
                self._paths[''] = tree.id
 
                return tree.id
 
            splitted = path.split('/')
 
            dirs, name = splitted[:-1], splitted[-1]
 
            curdir = ''
 

	
 
            # initially extract things from root dir
 
            for item, stat, id in tree.items():
 
                if curdir:
 
                    name = '/'.join((curdir, item))
 
                else:
 
                    name = item
 
                self._paths[name] = id
 
                self._stat_modes[name] = stat
 

	
 
            for dir in dirs:
 
                if curdir:
 
                    curdir = '/'.join((curdir, dir))
 
@@ -138,49 +136,49 @@ class GitChangeset(BaseChangeset):
 
                    curdir = dir
 
                dir_id = None
 
                for item, stat, id in tree.items():
 
                    if dir == item:
 
                        dir_id = id
 
                if dir_id:
 
                    # Update tree
 
                    tree = self.repository._repo[dir_id]
 
                    if not isinstance(tree, objects.Tree):
 
                        raise ChangesetError('%s is not a directory' % curdir)
 
                else:
 
                    raise ChangesetError('%s has not been found' % curdir)
 

	
 
                # cache all items from the given traversed tree
 
                for item, stat, id in tree.items():
 
                    if curdir:
 
                        name = '/'.join((curdir, item))
 
                    else:
 
                        name = item
 
                    self._paths[name] = id
 
                    self._stat_modes[name] = stat
 
            if path not in self._paths:
 
                raise NodeDoesNotExistError("There is no file nor directory "
 
                    "at the given path '%s' at revision %s"
 
                    % (path, safe_str(self.short_id)))
 
                    % (path, self.short_id))
 
        return self._paths[path]
 

	
 
    def _get_kind(self, path):
 
        obj = self.repository._repo[self._get_id_for_path(path)]
 
        if isinstance(obj, objects.Blob):
 
            return NodeKind.FILE
 
        elif isinstance(obj, objects.Tree):
 
            return NodeKind.DIR
 

	
 
    def _get_filectx(self, path):
 
        path = self._fix_path(path)
 
        if self._get_kind(path) != NodeKind.FILE:
 
            raise ChangesetError("File does not exist for revision %s at "
 
                " '%s'" % (self.raw_id, path))
 
        return path
 

	
 
    def _get_file_nodes(self):
 
        return chain(*(t[2] for t in self.walk()))
 

	
 
    @LazyProperty
 
    def parents(self):
 
        """
 
        Returns list of parents changesets.
 
        """
 
@@ -231,93 +229,91 @@ class GitChangeset(BaseChangeset):
 
                prev_ = cs.revision - 1
 
                if prev_ < 0:
 
                    raise IndexError
 
                prev_rev = cs.repository.revisions[prev_]
 
            except IndexError:
 
                raise ChangesetDoesNotExistError
 
            cs = cs.repository.get_changeset(prev_rev)
 

	
 
            if not branch or branch == cs.branch:
 
                return cs
 

	
 
    def diff(self, ignore_whitespace=True, context=3):
 
        # Only used to feed diffstat
 
        rev1 = self.parents[0] if self.parents else self.repository.EMPTY_CHANGESET
 
        rev2 = self
 
        return b''.join(self.repository.get_diff(rev1, rev2,
 
                                    ignore_whitespace=ignore_whitespace,
 
                                    context=context))
 

	
 
    def get_file_mode(self, path):
 
        """
 
        Returns stat mode of the file at the given ``path``.
 
        """
 
        # ensure path is traversed
 
        path = safe_str(path)
 
        self._get_id_for_path(path)
 
        return self._stat_modes[path]
 

	
 
    def get_file_content(self, path):
 
        """
 
        Returns content of the file at given ``path``.
 
        """
 
        id = self._get_id_for_path(path)
 
        blob = self.repository._repo[id]
 
        return blob.as_pretty_string()
 

	
 
    def get_file_size(self, path):
 
        """
 
        Returns size of the file at given ``path``.
 
        """
 
        id = self._get_id_for_path(path)
 
        blob = self.repository._repo[id]
 
        return blob.raw_length()
 

	
 
    def get_file_changeset(self, path):
 
        """
 
        Returns last commit of the file at the given ``path``.
 
        """
 
        return self.get_file_history(path, limit=1)[0]
 

	
 
    def get_file_history(self, path, limit=None):
 
        """
 
        Returns history of file as reversed list of ``Changeset`` objects for
 
        which file at given ``path`` has been modified.
 

	
 
        TODO: This function now uses the underlying OS 'git' and 'grep' commands,
 
        which is generally not good. Should be replaced with an algorithm
 
        iterating commits.
 
        """
 
        self._get_filectx(path)
 
        f_path = safe_str(path)
 

	
 
        if limit is not None:
 
            cmd = ['log', '-n', str(safe_int(limit, 0)),
 
                   '--pretty=format:%H', '-s', self.raw_id, '--', f_path]
 
                   '--pretty=format:%H', '-s', self.raw_id, '--', path]
 

	
 
        else:
 
            cmd = ['log',
 
                   '--pretty=format:%H', '-s', self.raw_id, '--', f_path]
 
                   '--pretty=format:%H', '-s', self.raw_id, '--', path]
 
        so = self.repository.run_git_command(cmd)
 
        ids = re.findall(r'[0-9a-fA-F]{40}', so)
 
        return [self.repository.get_changeset(sha) for sha in ids]
 

	
 
    def get_file_history_2(self, path):
 
        """
 
        Returns history of file as reversed list of ``Changeset`` objects for
 
        which file at given ``path`` has been modified.
 

	
 
        """
 
        self._get_filectx(path)
 
        from dulwich.walk import Walker
 
        include = [self.raw_id]
 
        walker = Walker(self.repository._repo.object_store, include,
 
                        paths=[path], max_entries=1)
 
        return [self.repository.get_changeset(ascii_str(x.commit.id))
 
                for x in walker]
 

	
 
    def get_file_annotate(self, path):
 
        """
 
        Returns a generator of four element tuples with
 
            lineno, sha, changeset lazy loader and line
 
        """
 
        # TODO: This function now uses the underlying OS 'git' command, which is
kallithea/lib/vcs/backends/git/repository.py
 
@@ -9,49 +9,49 @@
 
    :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
 
"""
 

	
 
import errno
 
import logging
 
import os
 
import re
 
import time
 
import urllib.error
 
import urllib.parse
 
import urllib.request
 
from collections import OrderedDict
 

	
 
import mercurial.url  # import httpbasicauthhandler, httpdigestauthhandler
 
import mercurial.util  # import url as hg_url
 
from dulwich.config import ConfigFile
 
from dulwich.objects import Tag
 
from dulwich.repo import NotGitRepository, Repo
 

	
 
from kallithea.lib.vcs import subprocessio
 
from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator
 
from kallithea.lib.vcs.conf import settings
 
from kallithea.lib.vcs.exceptions import (
 
    BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, TagDoesNotExistError)
 
from kallithea.lib.vcs.utils import ascii_str, date_fromtimestamp, makedate, safe_bytes, safe_str, safe_unicode
 
from kallithea.lib.vcs.utils import ascii_str, date_fromtimestamp, makedate, safe_bytes, safe_unicode
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.lib.vcs.utils.paths import abspath, get_user_home
 

	
 
from .changeset import GitChangeset
 
from .inmemory import GitInMemoryChangeset
 
from .workdir import GitWorkdir
 

	
 

	
 
SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class GitRepository(BaseRepository):
 
    """
 
    Git repository backend.
 
    """
 
    DEFAULT_BRANCH_NAME = 'master'
 
    scm = 'git'
 

	
 
    def __init__(self, repo_path, create=False, src_url=None,
 
                 update_after_clone=False, bare=False):
 

	
 
        self.path = abspath(repo_path)
 
@@ -296,49 +296,48 @@ class GitRepository(BaseRepository):
 

	
 
            if SHA_PATTERN.match(revision):
 
                msg = "Revision %r does not exist for %s" % (revision, self.name)
 
                raise ChangesetDoesNotExistError(msg)
 

	
 
        raise ChangesetDoesNotExistError("Given revision %r not recognized" % revision)
 

	
 
    def get_ref_revision(self, ref_type, ref_name):
 
        """
 
        Returns ``GitChangeset`` object representing repository's
 
        changeset at the given ``revision``.
 
        """
 
        return self._get_revision(ref_name)
 

	
 
    def _get_archives(self, archive_name='tip'):
 

	
 
        for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
 
            yield {"type": i[0], "extension": i[1], "node": archive_name}
 

	
 
    def _get_url(self, url):
 
        """
 
        Returns normalized url. If schema is not given, falls back to
 
        filesystem (``file:///``) schema.
 
        """
 
        url = safe_str(url)
 
        if url != 'default' and '://' not in url:
 
            url = ':///'.join(('file', url))
 
        return url
 

	
 
    @LazyProperty
 
    def name(self):
 
        return os.path.basename(self.path)
 

	
 
    @LazyProperty
 
    def last_change(self):
 
        """
 
        Returns last change made on this repository as datetime object
 
        """
 
        return date_fromtimestamp(self._get_mtime(), makedate()[1])
 

	
 
    def _get_mtime(self):
 
        try:
 
            return time.mktime(self.get_changeset().date.timetuple())
 
        except RepositoryError:
 
            idx_loc = '' if self.bare else '.git'
 
            # fallback to filesystem
 
            in_path = os.path.join(self.path, idx_loc, "index")
 
            he_path = os.path.join(self.path, idx_loc, "HEAD")
 
            if os.path.exists(in_path):
kallithea/lib/vcs/backends/git/ssh.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import logging
 
import os
 

	
 
from kallithea.lib.hooks import log_pull_action
 
from kallithea.lib.utils import make_ui
 
from kallithea.lib.vcs.backends.ssh import BaseSshHandler
 
from kallithea.lib.vcs.utils import safe_str
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class GitSshHandler(BaseSshHandler):
 
    vcs_type = 'git'
 

	
 
    @classmethod
 
    def make(cls, ssh_command_parts):
 
        r"""
 
        >>> import shlex
 

	
 
        >>> GitSshHandler.make(shlex.split("git-upload-pack '/foo bar'")).repo_name
 
        u'foo bar'
 
        >>> GitSshHandler.make(shlex.split("git-upload-pack '/foo bar'")).verb
 
        'git-upload-pack'
 
        >>> GitSshHandler.make(shlex.split(" git-upload-pack /blåbærgrød ")).repo_name # might not be necessary to support no quoting ... but we can
 
        u'bl\xe5b\xe6rgr\xf8d'
 
        >>> GitSshHandler.make(shlex.split('''git-upload-pack "/foo'bar"''')).repo_name
 
        u"foo'bar"
 
        >>> GitSshHandler.make(shlex.split("git-receive-pack '/foo'")).repo_name
 
        u'foo'
 
        >>> GitSshHandler.make(shlex.split("git-receive-pack '/foo'")).verb
 
@@ -49,35 +48,35 @@ class GitSshHandler(BaseSshHandler):
 
        >>> GitSshHandler.make(shlex.split('''git-upload-pack /foo bar''')) # ssh-serve will report 'SSH command %r is not supported'
 
        >>> shlex.split("git-upload-pack '/foo'bar' x") # ssh-serve will report: Error parsing SSH command "...": No closing quotation
 
        Traceback (most recent call last):
 
        ValueError: No closing quotation
 
        >>> GitSshHandler.make(shlex.split('hg -R foo serve --stdio')) # not handled here
 
        """
 
        if (len(ssh_command_parts) == 2 and
 
            ssh_command_parts[0] in ['git-upload-pack', 'git-receive-pack'] and
 
            ssh_command_parts[1].startswith('/')
 
        ):
 
            return cls(ssh_command_parts[1][1:], ssh_command_parts[0])
 

	
 
        return None
 

	
 
    def __init__(self, repo_name, verb):
 
        BaseSshHandler.__init__(self, repo_name)
 
        self.verb = verb
 

	
 
    def _serve(self):
 
        if self.verb == 'git-upload-pack': # action 'pull'
 
            # base class called set_hook_environment - action is hardcoded to 'pull'
 
            log_pull_action(ui=make_ui(), repo=self.db_repo.scm_instance._repo)
 
        else: # probably verb 'git-receive-pack', action 'push'
 
            if not self.allow_push:
 
                self.exit('Push access to %r denied' % safe_str(self.repo_name))
 
                self.exit('Push access to %r denied' % self.repo_name)
 
            # Note: push logging is handled by Git post-receive hook
 

	
 
        # git shell is not a real shell but use shell inspired quoting *inside* the argument.
 
        # Per https://github.com/git/git/blob/v2.22.0/quote.c#L12 :
 
        # The path must be "'" quoted, but "'" and "!" must exit the quoting and be "\" escaped
 
        quoted_abspath = "'%s'" % self.db_repo.repo_full_path.replace("'", r"'\''").replace("!", r"'\!'")
 
        newcmd = ['git', 'shell', '-c', "%s %s" % (self.verb, quoted_abspath)]
 
        log.debug('Serving: %s', newcmd)
 
        os.execvp(newcmd[0], newcmd)
 
        self.exit("Failed to exec 'git' as %s" % newcmd)
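To make the quoting rule above concrete, here is what a hypothetical repository path containing both special characters would turn into:

path = "/repos/it's here!"   # hypothetical repo_full_path
quoted = "'%s'" % path.replace("'", r"'\''").replace("!", r"'\!'")
# quoted now holds: '/repos/it'\''s here'\!''  -- which 'git shell -c' unquotes back to the original path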
kallithea/lib/vcs/backends/hg/changeset.py
 
import os
 
import posixpath
 

	
 
import mercurial.archival
 
import mercurial.node
 
import mercurial.obsutil
 

	
 
from kallithea.lib.vcs.backends.base import BaseChangeset
 
from kallithea.lib.vcs.conf import settings
 
from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, ImproperArchiveTypeError, NodeDoesNotExistError, VCSError
 
from kallithea.lib.vcs.nodes import (
 
    AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode, NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode)
 
from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, date_fromtimestamp, safe_bytes, safe_str, safe_unicode
 
from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, date_fromtimestamp, safe_bytes, safe_unicode
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.lib.vcs.utils.paths import get_dirs_for_path
 

	
 

	
 
class MercurialChangeset(BaseChangeset):
 
    """
 
    Represents state of the repository at a revision.
 
    """
 

	
 
    def __init__(self, repository, revision):
 
        self.repository = repository
 
        assert isinstance(revision, str), repr(revision)
 
        self._ctx = repository._repo[ascii_bytes(revision)]
 
        self.raw_id = ascii_str(self._ctx.hex())
 
        self.revision = self._ctx._rev
 
        self.nodes = {}
 

	
 
    @LazyProperty
 
    def tags(self):
 
        return [safe_unicode(tag) for tag in self._ctx.tags()]
 

	
 
    @LazyProperty
 
    def branch(self):
 
        return safe_unicode(self._ctx.branch())
 
@@ -181,49 +181,49 @@ class MercurialChangeset(BaseChangeset):
 
                prev_ = cs.repository.revisions.index(cs.raw_id) - 1
 
                if prev_ < 0:
 
                    raise IndexError
 
                prev_rev = cs.repository.revisions[prev_]
 
            except IndexError:
 
                raise ChangesetDoesNotExistError
 
            cs = cs.repository.get_changeset(prev_rev)
 

	
 
            if not branch or branch == cs.branch:
 
                return cs
 

	
 
    def diff(self):
 
        # Only used to feed diffstat
 
        return b''.join(self._ctx.diff())
 

	
 
    def _fix_path(self, path):
 
        """
 
        Paths are stored without trailing slash so we need to get rid of it if
 
        needed. Also mercurial keeps filenodes as str so we need to decode
 
        from unicode to str
 
        """
 
        if path.endswith('/'):
 
            path = path.rstrip('/')
 

	
 
        return safe_str(path)
 
        return path
 

	
 
    def _get_kind(self, path):
 
        path = self._fix_path(path)
 
        if path in self._file_paths:
 
            return NodeKind.FILE
 
        elif path in self._dir_paths:
 
            return NodeKind.DIR
 
        else:
 
            raise ChangesetError("Node does not exist at the given path '%s'"
 
                % (path))
 

	
 
    def _get_filectx(self, path):
 
        path = self._fix_path(path)
 
        if self._get_kind(path) != NodeKind.FILE:
 
            raise ChangesetError("File does not exist for revision %s at "
 
                " '%s'" % (self.raw_id, path))
 
        return self._ctx.filectx(safe_bytes(path))
 

	
 
    def _extract_submodules(self):
 
        """
 
        returns a dictionary with submodule information from substate file
 
        of hg repository
 
        """
 
        return self._ctx.substate
kallithea/lib/vcs/backends/hg/repository.py
 
@@ -18,49 +18,49 @@ import urllib.parse
 
import urllib.request
 
from collections import OrderedDict
 

	
 
import mercurial.commands
 
import mercurial.error
 
import mercurial.exchange
 
import mercurial.hg
 
import mercurial.hgweb
 
import mercurial.httppeer
 
import mercurial.localrepo
 
import mercurial.match
 
import mercurial.mdiff
 
import mercurial.node
 
import mercurial.patch
 
import mercurial.scmutil
 
import mercurial.sshpeer
 
import mercurial.tags
 
import mercurial.ui
 
import mercurial.url
 
import mercurial.util
 

	
 
from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator
 
from kallithea.lib.vcs.exceptions import (
 
    BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
 
from kallithea.lib.vcs.utils import ascii_str, author_email, author_name, date_fromtimestamp, makedate, safe_bytes, safe_str, safe_unicode
 
from kallithea.lib.vcs.utils import ascii_str, author_email, author_name, date_fromtimestamp, makedate, safe_bytes, safe_unicode
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.lib.vcs.utils.paths import abspath
 

	
 
from .changeset import MercurialChangeset
 
from .inmemory import MercurialInMemoryChangeset
 
from .workdir import MercurialWorkdir
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class MercurialRepository(BaseRepository):
 
    """
 
    Mercurial repository backend
 
    """
 
    DEFAULT_BRANCH_NAME = 'default'
 
    scm = 'hg'
 

	
 
    def __init__(self, repo_path, create=False, baseui=None, src_url=None,
 
                 update_after_clone=False):
 
        """
 
        Raises RepositoryError if repository could not be found at the given
 
        ``repo_path``.
 

	
 
@@ -425,92 +425,89 @@ class MercurialRepository(BaseRepository
 
        """
 
        if self._empty:
 
            raise EmptyRepositoryError("There are no changesets yet")
 

	
 
        if revision in [-1, None]:
 
            revision = b'tip'
 
        elif isinstance(revision, str):
 
            revision = safe_bytes(revision)
 

	
 
        try:
 
            if isinstance(revision, int):
 
                return ascii_str(self._repo[revision].hex())
 
            return ascii_str(mercurial.scmutil.revsymbol(self._repo, revision).hex())
 
        except (IndexError, ValueError, mercurial.error.RepoLookupError, TypeError):
 
            msg = "Revision %r does not exist for %s" % (safe_unicode(revision), self.name)
 
            raise ChangesetDoesNotExistError(msg)
 
        except (LookupError, ):
 
            msg = "Ambiguous identifier `%s` for %s" % (safe_unicode(revision), self.name)
 
            raise ChangesetDoesNotExistError(msg)
 

	
 
    def get_ref_revision(self, ref_type, ref_name):
 
        """
 
        Returns revision number for the given reference.
 
        """
 
        ref_name = safe_str(ref_name)
 
        if ref_type == 'rev' and not ref_name.strip('0'):
 
            return self.EMPTY_CHANGESET
 
        # lookup up the exact node id
 
        _revset_predicates = {
 
                'branch': 'branch',
 
                'book': 'bookmark',
 
                'tag': 'tag',
 
                'rev': 'id',
 
            }
 
        # avoid expensive branch(x) iteration over whole repo
 
        rev_spec = "%%s & %s(%%s)" % _revset_predicates[ref_type]
 
        try:
 
            revs = self._repo.revs(rev_spec, ref_name, ref_name)
 
        except LookupError:
 
            msg = "Ambiguous identifier %s:%s for %s" % (ref_type, ref_name, self.name)
 
            raise ChangesetDoesNotExistError(msg)
 
        except mercurial.error.RepoLookupError:
 
            msg = "Revision %s:%s does not exist for %s" % (ref_type, ref_name, self.name)
 
            raise ChangesetDoesNotExistError(msg)
 
        if revs:
 
            revision = revs.last()
 
        else:
 
            # TODO: just report 'not found'?
 
            revision = ref_name
 

	
 
        return self._get_revision(revision)
 

	
 
    def _get_archives(self, archive_name='tip'):
 
        allowed = self.baseui.configlist(b"web", b"allow_archive",
 
                                         untrusted=True)
 
        for name, ext in [(b'zip', '.zip'), (b'gz', '.tar.gz'), (b'bz2', '.tar.bz2')]:
 
            if name in allowed or self._repo.ui.configbool(b"web",
 
                                                           b"allow" + name,
 
                                                           untrusted=True):
 
                yield {"type": name, "extension": ext, "node": archive_name}
 

	
 
    def _get_url(self, url):
 
        """
 
        Returns normalized url. If schema is not given, would fall
 
        to filesystem
 
        (``file:///``) schema.
 
        Returns normalized url. If schema is not given, fall back to
 
        filesystem (``file:///``) schema.
 
        """
 
        url = safe_str(url)
 
        if url != 'default' and '://' not in url:
 
            url = "file:" + urllib.request.pathname2url(url)
 
        return url
 

	
 
    def get_changeset(self, revision=None):
 
        """
 
        Returns ``MercurialChangeset`` object representing repository's
 
        changeset at the given ``revision``.
 
        """
 
        return MercurialChangeset(repository=self, revision=self._get_revision(revision))
 

	
 
    def get_changesets(self, start=None, end=None, start_date=None,
 
                       end_date=None, branch_name=None, reverse=False, max_revisions=None):
 
        """
 
        Returns iterator of ``MercurialChangeset`` objects from start to end
 
        (both are inclusive)
 

	
 
        :param start: None, str, int or mercurial lookup format
 
        :param end:  None, str, int or mercurial lookup format
 
        :param start_date:
 
        :param end_date:
 
        :param branch_name:
 
        :param reversed: return changesets in reversed order
 
        """
kallithea/lib/vcs/backends/ssh.py
 
@@ -4,49 +4,48 @@
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
"""
 
vcs.backends.ssh
 
~~~~~~~~~~~~~~~~~
 

	
 
SSH backend for all available SCMs
 
"""
 

	
 
import datetime
 
import logging
 
import sys
 

	
 
from kallithea.lib.auth import AuthUser, HasPermissionAnyMiddleware
 
from kallithea.lib.utils2 import set_hook_environment
 
from kallithea.lib.vcs.utils import safe_str
 
from kallithea.model.db import Repository, User, UserSshKeys
 
from kallithea.model.meta import Session
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class BaseSshHandler(object):
 
    # Protocol for setting properties:
 
    # Set by sub class:
 
    #   vcs_type: 'hg' or 'git'
 
    # Set by make() / __init__():
 
    #   repo_name: requested repo name - only validated by serve()
 
    # Set by serve() - must not be accessed before:
 
    #   db_repo: repository db object
 
    #   authuser: user that has been authenticated - like request.authuser ... which isn't used here
 
    #   allow_push: false for read-only access to the repo
 

	
 
    # Set defaults, in case .exit should be called early
 
    vcs_type = None
 
    repo_name = None
 

	
 
    @staticmethod
 
    def make(ssh_command):
 
@@ -62,49 +61,49 @@ class BaseSshHandler(object):
 
    def serve(self, user_id, key_id, client_ip):
 
        """Verify basic sanity of the repository, and that the user is
 
        valid and has access - then serve the native VCS protocol for
 
        repository access."""
 
        dbuser = User.get(user_id)
 
        if dbuser is None:
 
            self.exit('User %r not found' % user_id)
 
        self.authuser = AuthUser.make(dbuser=dbuser, ip_addr=client_ip)
 
        log.info('Authorized user %s from SSH %s trusting user id %s and key id %s for %r', dbuser, client_ip, user_id, key_id, self.repo_name)
 
        if self.authuser is None: # not ok ... but already kind of authenticated by SSH ... but not really not authorized ...
 
            self.exit('User %s from %s cannot be authorized' % (dbuser.username, client_ip))
 

	
 
        ssh_key = UserSshKeys.get(key_id)
 
        if ssh_key is None:
 
            self.exit('SSH key %r not found' % key_id)
 
        ssh_key.last_seen = datetime.datetime.now()
 
        Session().commit()
 

	
 
        if HasPermissionAnyMiddleware('repository.write',
 
                                      'repository.admin')(self.authuser, self.repo_name):
 
            self.allow_push = True
 
        elif HasPermissionAnyMiddleware('repository.read')(self.authuser, self.repo_name):
 
            self.allow_push = False
 
        else:
 
            self.exit('Access to %r denied' % safe_str(self.repo_name))
 
            self.exit('Access to %r denied' % self.repo_name)
 

	
 
        self.db_repo = Repository.get_by_repo_name(self.repo_name)
 
        if self.db_repo is None:
 
            self.exit("Repository '%s' not found" % self.repo_name)
 
        assert self.db_repo.repo_name == self.repo_name
 

	
 
        # Set global hook environment up for 'push' actions.
 
        # If pull actions should be served, the actual hook invocation will be
 
        # hardcoded to 'pull' when log_pull_action is invoked (directly on Git,
 
        # or through the Mercurial 'outgoing' hook).
 
        # For push actions, the action in global hook environment is used (in
 
        # handle_git_post_receive when it is called as Git post-receive hook,
 
        # or in log_push_action through the Mercurial 'changegroup' hook).
 
        set_hook_environment(self.authuser.username, client_ip, self.repo_name, self.vcs_type, 'push')
 
        return self._serve()
 

	
 
    def _serve(self):
 
        """Serve the native protocol for repository access."""
 
        raise NotImplementedError
 

	
 
    def exit(self, error):
 
        log.info('abort serving %s %s: %s', self.vcs_type, self.repo_name, error)
 
        sys.stderr.write('abort: %s\n' % error)
 
        sys.exit(1)
kallithea/lib/vcs/nodes.py
 
# -*- coding: utf-8 -*-
 
"""
 
    vcs.nodes
 
    ~~~~~~~~~
 

	
 
    Module holding everything related to vcs nodes.
 

	
 
    :created_on: Apr 8, 2010
 
    :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
 
"""
 

	
 
import functools
 
import mimetypes
 
import posixpath
 
import stat
 

	
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 
from kallithea.lib.vcs.exceptions import NodeError, RemovedFileNodeError
 
from kallithea.lib.vcs.utils import safe_bytes, safe_str, safe_unicode
 
from kallithea.lib.vcs.utils import safe_bytes, safe_unicode
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 

	
 

	
 
class NodeKind:
 
    SUBMODULE = -1
 
    DIR = 1
 
    FILE = 2
 

	
 

	
 
class NodeState:
 
    ADDED = u'added'
 
    CHANGED = u'changed'
 
    NOT_CHANGED = u'not changed'
 
    REMOVED = u'removed'
 

	
 

	
 
class NodeGeneratorBase(object):
 
    """
 
    Base class for removed, added and changed filenodes; it's a lazy generator
 
    class that will create filenodes only on iteration or call
 

	
 
    The len method doesn't need to create filenodes at all
 
    """
 

	
 
@@ -81,49 +81,49 @@ class RemovedFileNodesGenerator(NodeGene
 
            yield RemovedFileNode(path=p)
 

	
 
    def __getitem__(self, key):
 
        assert isinstance(key, slice), key
 
        for p in self.current_paths[key]:
 
            yield RemovedFileNode(path=p)
 

	
 

	
 
@functools.total_ordering
 
class Node(object):
 
    """
 
    Simplest class representing file or directory on repository.  SCM backends
 
    should use ``FileNode`` and ``DirNode`` subclasses rather than ``Node``
 
    directly.
 

	
 
    Node's ``path`` cannot start with slash as we operate on *relative* paths
 
    only. Moreover, every single node is identified by the ``path`` attribute,
 
    so it cannot end with a slash either. Otherwise, the path could lead to mistakes.
 
    """
 

	
 
    def __init__(self, path, kind):
 
        if path.startswith('/'):
 
            raise NodeError("Cannot initialize Node objects with slash at "
 
                            "the beginning as only relative paths are supported")
 
        self.path = safe_str(path.rstrip('/'))  # we store paths as str
 
        self.path = path.rstrip('/')
 
        if path == '' and kind != NodeKind.DIR:
 
            raise NodeError("Only DirNode and its subclasses may be "
 
                            "initialized with empty path")
 
        self.kind = kind
 
        #self.dirs, self.files = [], []
 
        if self.is_root() and not self.is_dir():
 
            raise NodeError("Root node cannot be FILE kind")
 

	
 
    @LazyProperty
 
    def parent(self):
 
        parent_path = self.get_parent_path()
 
        if parent_path:
 
            if self.changeset:
 
                return self.changeset.get_node(parent_path)
 
            return DirNode(parent_path)
 
        return None
 

	
 
    @LazyProperty
 
    def name(self):
 
        """
 
        Returns the name of the node, i.e. if its path is ``foo/bar``,
 
        then only the last part, ``bar``, is returned.
 
        """
 
        return self.path.rstrip('/').split('/')[-1]
 
@@ -571,39 +571,39 @@ class RootNode(DirNode):
 
    """
 

	
 
    def __init__(self, nodes=(), changeset=None):
 
        super(RootNode, self).__init__(path='', nodes=nodes,
 
            changeset=changeset)
 

	
 
    def __repr__(self):
 
        return '<%s>' % self.__class__.__name__
 

	
 

	
 
class SubModuleNode(Node):
 
    """
 
    represents a SubModule of Git or SubRepo of Mercurial
 
    """
 
    is_binary = False
 
    size = 0
 

	
 
    def __init__(self, name, url, changeset=None, alias=None):
 
        # Note: Doesn't call Node.__init__!
 
        self.path = name
 
        self.kind = NodeKind.SUBMODULE
 
        self.alias = alias
 
        # we have to use EmptyChangeset here since this can point to svn/git/hg
 
        # submodules that we cannot get from the repository
 
        self.changeset = EmptyChangeset(str(changeset), alias=alias)
 
        self.changeset = EmptyChangeset(changeset, alias=alias)
 
        self.url = url
 

	
 
    def __repr__(self):
 
        return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
 
                                 getattr(self.changeset, 'short_id', ''))
 

	
 
    @LazyProperty
 
    def name(self):
 
        """
 
        Returns the name of the node, i.e. if its path is ``foo/bar``,
 
        then only the last part, ``bar``, is returned.
 
        """
 
        org = self.path.rstrip('/').rsplit('/', 1)[-1]
 
        return u'%s @ %s' % (org, self.changeset.short_id)
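A side note on the Node invariants documented above (relative paths only, no leading or trailing slash, the name being the last path component): a tiny standalone sketch, not part of this changeset, showing the effect of those rules:

def normalize_node_path(path):
    # Reject absolute paths, strip a trailing slash, take the last component
    # as the node name - mirroring the checks in Node.__init__ and Node.name.
    if path.startswith('/'):
        raise ValueError('only relative paths are supported')
    path = path.rstrip('/')
    name = path.rsplit('/', 1)[-1]
    return path, name

# Example: prints ('docs/readme.rst', 'readme.rst')
print(normalize_node_path('docs/readme.rst/'))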
kallithea/lib/vcs/utils/__init__.py
Show inline comments
 
@@ -183,39 +183,39 @@ email_re = re.compile(
 
    r"""(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?""",
 
    re.IGNORECASE)
 

	
 

	
 
def author_email(author):
 
    """
 
    Returns email address of given author string.
 
    If author contains <> brackets, only look inside that.
 
    If any RFC valid email address is found, return that.
 
    Else, return empty string.
 

	
 
    """
 
    if not author:
 
        return ''
 

	
 
    l = author.find('<') + 1
 
    if l != 0:
 
        r = author.find('>', l)
 
        if r != -1:
 
            author = author[l:r]
 

	
 
    m = email_re.search(author)
 
    if m is None:
 
        return ''
 
    return safe_str(m.group(0))
 
    return m.group(0)
 

	
 

	
 
def author_name(author):
 
    """
 
    get name of author, or else username.
 
    It'll try to find an email in the author string and just cut it off
 
    to get the username
 
    """
 
    if not author:
 
        return ''
 
    if '@' not in author:
 
        return author
 
    return author.replace(author_email(author), '').replace('<', '') \
 
        .replace('>', '').strip()
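author_email and author_name above split a 'Name <email>' author string into its two parts. A combined sketch of the same idea, with a deliberately simplified email pattern (the module's own regex is stricter):

import re

_email_re = re.compile(r'[a-z0-9._%+-]+@[a-z0-9.-]+\.[a-z]{2,}', re.IGNORECASE)  # simplified

def split_author(author):
    # Return (name, email); either part may be empty if it cannot be found.
    if not author:
        return '', ''
    m = _email_re.search(author)
    email = m.group(0) if m else ''
    name = author.replace(email, '').replace('<', '').replace('>', '').strip()
    return name, email

# Example: prints ('Joe Doe', 'joe@example.com')
print(split_author('Joe Doe <joe@example.com>'))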
kallithea/model/db.py
Show inline comments
 
@@ -28,49 +28,49 @@ Original author and date, and relevant c
 
import base64
 
import collections
 
import datetime
 
import functools
 
import hashlib
 
import logging
 
import os
 
import time
 
import traceback
 

	
 
import ipaddr
 
import sqlalchemy
 
from beaker.cache import cache_region, region_invalidate
 
from sqlalchemy import Boolean, Column, DateTime, Float, ForeignKey, Index, Integer, LargeBinary, String, Unicode, UnicodeText, UniqueConstraint
 
from sqlalchemy.ext.hybrid import hybrid_property
 
from sqlalchemy.orm import class_mapper, joinedload, relationship, validates
 
from tg.i18n import lazy_ugettext as _
 
from webob.exc import HTTPNotFound
 

	
 
import kallithea
 
from kallithea.lib import ext_json
 
from kallithea.lib.caching_query import FromCache
 
from kallithea.lib.exceptions import DefaultUserException
 
from kallithea.lib.utils2 import (
 
    Optional, ascii_bytes, aslist, get_changeset_safe, get_clone_url, remove_prefix, safe_bytes, safe_int, safe_str, safe_unicode, str2bool, urlreadable)
 
    Optional, ascii_bytes, aslist, get_changeset_safe, get_clone_url, remove_prefix, safe_bytes, safe_int, safe_unicode, str2bool, urlreadable)
 
from kallithea.lib.vcs import get_backend
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 
from kallithea.lib.vcs.utils.helpers import get_scm
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.model.meta import Base, Session
 

	
 

	
 
URL_SEP = '/'
 
log = logging.getLogger(__name__)
 

	
 
#==============================================================================
 
# BASE CLASSES
 
#==============================================================================
 

	
 
def _hash_key(k):
 
    return hashlib.md5(safe_bytes(k)).hexdigest()
 

	
 

	
 
class BaseDbModel(object):
 
    """
 
    Base Model for all classes
 
    """
 

	
 
    @classmethod
 
@@ -1403,49 +1403,49 @@ class Repository(Base, BaseDbModel):
 
    _scm_instance = None
 

	
 
    @property
 
    def scm_instance(self):
 
        if self._scm_instance is None:
 
            self._scm_instance = self.scm_instance_cached()
 
        return self._scm_instance
 

	
 
    def scm_instance_cached(self, valid_cache_keys=None):
 
        @cache_region('long_term', 'scm_instance_cached')
 
        def _c(repo_name): # repo_name is just for the cache key
 
            log.debug('Creating new %s scm_instance and populating cache', repo_name)
 
            return self.scm_instance_no_cache()
 
        rn = self.repo_name
 

	
 
        valid = CacheInvalidation.test_and_set_valid(rn, None, valid_cache_keys=valid_cache_keys)
 
        if not valid:
 
            log.debug('Cache for %s invalidated, getting new object', rn)
 
            region_invalidate(_c, None, 'scm_instance_cached', rn)
 
        else:
 
            log.debug('Trying to get scm_instance of %s from cache', rn)
 
        return _c(rn)
 

	
 
    def scm_instance_no_cache(self):
 
        repo_full_path = safe_str(self.repo_full_path)
 
        repo_full_path = self.repo_full_path
 
        alias = get_scm(repo_full_path)[0]
 
        log.debug('Creating instance of %s repository from %s',
 
                  alias, self.repo_full_path)
 
        backend = get_backend(alias)
 

	
 
        if alias == 'hg':
 
            repo = backend(repo_full_path, create=False,
 
                           baseui=self._ui)
 
        else:
 
            repo = backend(repo_full_path, create=False)
 

	
 
        return repo
 

	
 
    def __json__(self):
 
        return dict(
 
            repo_id=self.repo_id,
 
            repo_name=self.repo_name,
 
            landing_rev=self.landing_rev,
 
        )
 

	
 

	
 
class RepoGroup(Base, BaseDbModel):
 
    __tablename__ = 'groups'
 
    __table_args__ = (
 
@@ -2070,53 +2070,53 @@ class CacheInvalidation(Base, BaseDbMode
 
    def clear_cache(cls):
 
        """
 
        Delete all cache keys from database.
 
        Should only be run when all instances are down and all entries thus stale.
 
        """
 
        cls.query().delete()
 
        Session().commit()
 

	
 
    @classmethod
 
    def _get_cache_key(cls, key):
 
        """
 
        Wrapper for generating a unique cache key for this instance and "key".
 
        key must / will start with a repo_name, which will be stored in .cache_args.
 
        """
 
        prefix = kallithea.CONFIG.get('instance_id', '')
 
        return "%s%s" % (prefix, key)
 

	
 
    @classmethod
 
    def set_invalidate(cls, repo_name):
 
        """
 
        Mark all caches of a repo as invalid in the database.
 
        """
 
        inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
 
        log.debug('for repo %s got %s invalidation objects',
 
                  safe_str(repo_name), inv_objs)
 
                  repo_name, inv_objs)
 

	
 
        for inv_obj in inv_objs:
 
            log.debug('marking %s key for invalidation based on repo_name=%s',
 
                      inv_obj, safe_str(repo_name))
 
                      inv_obj, repo_name)
 
            Session().delete(inv_obj)
 
        Session().commit()
 

	
 
    @classmethod
 
    def test_and_set_valid(cls, repo_name, kind, valid_cache_keys=None):
 
        """
 
        Mark this cache key as active and currently cached.
 
        Return True if the existing cache registration was still valid.
 
        Return False to indicate that it had been invalidated and caches should be refreshed.
 
        """
 

	
 
        key = (repo_name + '_' + kind) if kind else repo_name
 
        cache_key = cls._get_cache_key(key)
 

	
 
        if valid_cache_keys and cache_key in valid_cache_keys:
 
            return True
 

	
 
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
 
        if inv_obj is None:
 
            inv_obj = cls(cache_key, repo_name)
 
            Session().add(inv_obj)
 
        elif inv_obj.cache_active:
 
            return True
 
        inv_obj.cache_active = True
 
@@ -2496,49 +2496,49 @@ class Gist(Base, BaseDbModel):
 
        """
 
        gist = self
 
        data = dict(
 
            gist_id=gist.gist_id,
 
            type=gist.gist_type,
 
            access_id=gist.gist_access_id,
 
            description=gist.gist_description,
 
            url=gist.gist_url(),
 
            expires=gist.gist_expires,
 
            created_on=gist.created_on,
 
        )
 
        return data
 

	
 
    def __json__(self):
 
        data = dict(
 
        )
 
        data.update(self.get_api_data())
 
        return data
 
    ## SCM functions
 

	
 
    @property
 
    def scm_instance(self):
 
        from kallithea.lib.vcs import get_repo
 
        base_path = self.base_path()
 
        return get_repo(os.path.join(safe_str(base_path), safe_str(self.gist_access_id)))
 
        return get_repo(os.path.join(base_path, self.gist_access_id))
 

	
 

	
 
class UserSshKeys(Base, BaseDbModel):
 
    __tablename__ = 'user_ssh_keys'
 
    __table_args__ = (
 
        Index('usk_fingerprint_idx', 'fingerprint'),
 
        UniqueConstraint('fingerprint'),
 
        _table_args_default_dict
 
    )
 
    __mapper_args__ = {}
 

	
 
    user_ssh_key_id = Column(Integer(), primary_key=True)
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
 
    _public_key = Column('public_key', UnicodeText(), nullable=False)
 
    description = Column(UnicodeText(), nullable=False)
 
    fingerprint = Column(String(255), nullable=False, unique=True)
 
    created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    last_seen = Column(DateTime(timezone=False), nullable=True)
 

	
 
    user = relationship('User')
 

	
 
    @property
 
    def public_key(self):
 
        return self._public_key
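The CacheInvalidation methods above implement a simple protocol: test_and_set_valid returns True when a registration is still valid and otherwise marks it active so the caller repopulates the cache, while set_invalidate drops the registration. A minimal in-memory sketch of that protocol (the real implementation keeps the registrations in the database and prefixes keys with instance_id):

class CacheRegistrySketch:
    def __init__(self):
        self._active = {}  # cache_key -> bool

    def test_and_set_valid(self, cache_key):
        # True: cached object can be reused; False: caller must repopulate.
        if self._active.get(cache_key):
            return True
        self._active[cache_key] = True
        return False

    def set_invalidate(self, cache_key):
        # Forget the registration so the next lookup repopulates.
        self._active.pop(cache_key, None)

# Example flow: first lookup repopulates, second reuses, invalidation resets.
reg = CacheRegistrySketch()
assert reg.test_and_set_valid('repo1') is False
assert reg.test_and_set_valid('repo1') is True
reg.set_invalidate('repo1')
assert reg.test_and_set_valid('repo1') is False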
kallithea/model/repo.py
Show inline comments
 
@@ -18,49 +18,49 @@ kallithea.model.repo
 
Repository model for kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jun 5, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 

	
 
"""
 

	
 
import logging
 
import os
 
import shutil
 
import traceback
 
from datetime import datetime
 

	
 
import kallithea.lib.utils2
 
from kallithea.lib import helpers as h
 
from kallithea.lib.auth import HasRepoPermissionLevel, HasUserGroupPermissionLevel
 
from kallithea.lib.caching_query import FromCache
 
from kallithea.lib.exceptions import AttachedForksError
 
from kallithea.lib.hooks import log_delete_repository
 
from kallithea.lib.utils import is_valid_repo_uri, make_ui
 
from kallithea.lib.utils2 import LazyProperty, get_current_authuser, obfuscate_url_pw, remove_prefix, safe_str
 
from kallithea.lib.utils2 import LazyProperty, get_current_authuser, obfuscate_url_pw, remove_prefix
 
from kallithea.lib.vcs.backends import get_backend
 
from kallithea.model.db import (
 
    Permission, RepoGroup, Repository, RepositoryField, Session, Statistics, Ui, User, UserGroup, UserGroupRepoGroupToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm, UserRepoToPerm)
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class RepoModel(object):
 

	
 
    URL_SEPARATOR = Repository.url_sep()
 

	
 
    def _create_default_perms(self, repository, private):
 
        # create default permission
 
        default = 'repository.read'
 
        def_user = User.get_default_user()
 
        for p in def_user.user_perms:
 
            if p.permission.permission_name.startswith('repository.'):
 
                default = p.permission.permission_name
 
                break
 

	
 
        default_perm = 'repository.none' if private else default
 

	
 
        repo_to_perm = UserRepoToPerm()
 
@@ -620,99 +620,98 @@ class RepoModel(object):
 

	
 
    def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
 
                                clone_uri=None, repo_store_location=None):
 
        """
 
        Makes a repository on the filesystem. The operation is group aware, meaning that it will create
 
        a repository within a group, and alter the paths according to the group location.
 

	
 
        Note: clone_uri is low level and not validated - it might be a file system path used for validated cloning
 
        """
 
        from kallithea.lib.utils import is_valid_repo, is_valid_repo_group
 
        from kallithea.model.scm import ScmModel
 

	
 
        if '/' in repo_name:
 
            raise ValueError('repo_name must not contain groups, got `%s`' % repo_name)
 

	
 
        if isinstance(repo_group, RepoGroup):
 
            new_parent_path = os.sep.join(repo_group.full_path_splitted)
 
        else:
 
            new_parent_path = repo_group or ''
 

	
 
        if repo_store_location:
 
            _paths = [repo_store_location]
 
        else:
 
            _paths = [self.repos_path, new_parent_path, repo_name]
 
            # we need to make it str for mercurial
 
        repo_path = os.path.join(*(safe_str(x) for x in _paths))
 
        repo_path = os.path.join(*_paths)
 

	
 
        # check if this path is not a repository
 
        if is_valid_repo(repo_path, self.repos_path):
 
            raise Exception('This path %s is a valid repository' % repo_path)
 

	
 
        # check if this path is a group
 
        if is_valid_repo_group(repo_path, self.repos_path):
 
            raise Exception('This path %s is a valid group' % repo_path)
 

	
 
        log.info('creating repo %s in %s from url: `%s`',
 
            repo_name, repo_path,
 
            obfuscate_url_pw(clone_uri))
 

	
 
        backend = get_backend(repo_type)
 

	
 
        if repo_type == 'hg':
 
            baseui = make_ui()
 
            # patch and reset hooks section of UI config to not run any
 
            # hooks on creating remote repo
 
            for k, v in baseui.configitems('hooks'):
 
                baseui.setconfig('hooks', k, None)
 

	
 
            repo = backend(repo_path, create=True, src_url=clone_uri, baseui=baseui)
 
        elif repo_type == 'git':
 
            repo = backend(repo_path, create=True, src_url=clone_uri, bare=True)
 
            # add kallithea hook into this repo
 
            ScmModel().install_git_hooks(repo=repo)
 
        else:
 
            raise Exception('Not supported repo_type %s expected hg/git' % repo_type)
 

	
 
        log.debug('Created repo %s with %s backend',
 
                  repo_name, repo_type)
 
        return repo
 

	
 
    def _rename_filesystem_repo(self, old, new):
 
        """
 
        renames repository on filesystem
 

	
 
        :param old: old name
 
        :param new: new name
 
        """
 
        log.info('renaming repo from %s to %s', old, new)
 

	
 
        old_path = safe_str(os.path.join(self.repos_path, old))
 
        new_path = safe_str(os.path.join(self.repos_path, new))
 
        old_path = os.path.join(self.repos_path, old)
 
        new_path = os.path.join(self.repos_path, new)
 
        if os.path.isdir(new_path):
 
            raise Exception(
 
                'Was trying to rename to already existing dir %s' % new_path
 
            )
 
        shutil.move(old_path, new_path)
 

	
 
    def _delete_filesystem_repo(self, repo):
 
        """
 
        Removes the repo from the filesystem; the removal is actually done by
 
        renaming the dir with an 'rm__*' prefix, which Kallithea will skip.
 
        It can be undeleted later by reverting the rename.
 

	
 
        :param repo: repo object
 
        """
 
        rm_path = safe_str(os.path.join(self.repos_path, repo.repo_name))
 
        rm_path = os.path.join(self.repos_path, repo.repo_name)
 
        log.info("Removing %s", rm_path)
 

	
 
        _now = datetime.now()
 
        _ms = str(_now.microsecond).rjust(6, '0')
 
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
 
                             repo.just_name)
 
        if repo.group:
 
            args = repo.group.full_path_splitted + [_d]
 
            _d = os.path.join(*args)
 
        if os.path.exists(rm_path):
 
            shutil.move(rm_path, safe_str(os.path.join(self.repos_path, _d)))
 
            shutil.move(rm_path, os.path.join(self.repos_path, _d))
 
        else:
 
            log.error("Can't find repo to delete in %r", rm_path)
kallithea/model/scm.py
Show inline comments
 
@@ -20,49 +20,49 @@ Scm model for Kallithea
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 9, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 
import os
 
import posixpath
 
import re
 
import sys
 
import traceback
 

	
 
import pkg_resources
 
from tg.i18n import ugettext as _
 

	
 
import kallithea
 
from kallithea import BACKENDS
 
from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoPermissionLevel, HasUserGroupPermissionLevel
 
from kallithea.lib.exceptions import IMCCommitError, NonRelativePathError
 
from kallithea.lib.hooks import process_pushed_raw_ids
 
from kallithea.lib.utils import action_logger, get_filesystem_repos, make_ui
 
from kallithea.lib.utils2 import safe_bytes, safe_str, set_hook_environment
 
from kallithea.lib.utils2 import safe_bytes, set_hook_environment
 
from kallithea.lib.vcs import get_backend
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 
from kallithea.lib.vcs.exceptions import RepositoryError
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.model.db import PullRequest, RepoGroup, Repository, Session, Ui, User, UserFollowing, UserLog
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class UserTemp(object):
 
    def __init__(self, user_id):
 
        self.user_id = user_id
 

	
 
    def __repr__(self):
 
        return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
 

	
 

	
 
class RepoTemp(object):
 
    def __init__(self, repo_id):
 
        self.repo_id = repo_id
 

	
 
    def __repr__(self):
 
@@ -169,49 +169,49 @@ class ScmModel(object):
 
        """
 

	
 
        if repos_path is None:
 
            repos_path = self.repos_path
 

	
 
        log.info('scanning for repositories in %s', repos_path)
 

	
 
        baseui = make_ui()
 
        repos = {}
 

	
 
        for name, path in get_filesystem_repos(repos_path):
 
            # name needs to be decomposed and put back together using the '/'
 
            # since this is the internal storage separator for Kallithea
 
            name = Repository.normalize_repo_name(name)
 

	
 
            try:
 
                if name in repos:
 
                    raise RepositoryError('Duplicate repository name %s '
 
                                          'found in %s' % (name, path))
 
                else:
 

	
 
                    klass = get_backend(path[0])
 

	
 
                    if path[0] == 'hg' and path[0] in BACKENDS:
 
                        repos[name] = klass(safe_str(path[1]), baseui=baseui)
 
                        repos[name] = klass(path[1], baseui=baseui)
 

	
 
                    if path[0] == 'git' and path[0] in BACKENDS:
 
                        repos[name] = klass(path[1])
 
            except OSError:
 
                continue
 
        log.debug('found %s paths with repositories', len(repos))
 
        return repos
 

	
 
    def get_repos(self, repos):
 
        """Return the repos the user has access to"""
 
        return RepoList(repos, perm_level='read')
 

	
 
    def get_repo_groups(self, groups=None):
 
        """Return the repo groups the user has access to
 
        If no groups are specified, use top level groups.
 
        """
 
        if groups is None:
 
            groups = RepoGroup.query() \
 
                .filter(RepoGroup.parent_group_id == None).all()
 
        return RepoGroupList(groups, perm_level='read')
 

	
 
    def mark_for_invalidation(self, repo_name):
 
        """
 
        Mark caches of this repo invalid in the database.
 
@@ -375,55 +375,50 @@ class ScmModel(object):
 
                # TODO: extract fetched revisions ... somehow ...
 
                self._handle_push(repo,
 
                                  username=username,
 
                                  ip_addr=ip_addr,
 
                                  action='push_remote',
 
                                  repo_name=repo_name,
 
                                  revisions=[])
 
            else:
 
                set_hook_environment(username, ip_addr, dbrepo.repo_name,
 
                                           repo.alias, action='push_remote')
 
                repo.pull(clone_uri)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def commit_change(self, repo, repo_name, cs, user, ip_addr, author, message,
 
                      content, f_path):
 
        """
 
        Commit a change to a single file
 

	
 
        :param repo: a db_repo.scm_instance
 
        """
 
        user = User.guess_instance(user)
 
        IMC = self._get_IMC_module(repo.alias)
 

	
 
        # decoding here ensures that we have properly encoded values
 
        # in any other case this will throw exceptions and deny the commit
 
        content = safe_str(content)
 
        path = safe_str(f_path)
 
        imc = IMC(repo)
 
        imc.change(FileNode(path, content, mode=cs.get_file_mode(f_path)))
 
        imc.change(FileNode(f_path, content, mode=cs.get_file_mode(f_path)))
 
        try:
 
            tip = imc.commit(message=message, author=author,
 
                             parents=[cs], branch=cs.branch)
 
        except Exception as e:
 
            log.error(traceback.format_exc())
 
            # clear caches - we also want a fresh object if commit fails
 
            self.mark_for_invalidation(repo_name)
 
            raise IMCCommitError(str(e))
 
        self._handle_push(repo,
 
                          username=user.username,
 
                          ip_addr=ip_addr,
 
                          action='push_local',
 
                          repo_name=repo_name,
 
                          revisions=[tip.raw_id])
 
        return tip
 

	
 
    def _sanitize_path(self, f_path):
 
        if f_path.startswith('/') or f_path.startswith('.') or '../' in f_path:
 
            raise NonRelativePathError('%s is not a relative path' % f_path)
 
        if f_path:
 
            f_path = posixpath.normpath(f_path)
 
        return f_path
 

	
 
    def get_nodes(self, repo_name, revision, root_path='/', flat=True):
 
@@ -457,54 +452,49 @@ class ScmModel(object):
 

	
 
    def create_nodes(self, user, ip_addr, repo, message, nodes, parent_cs=None,
 
                     author=None, trigger_push_hook=True):
 
        """
 
        Commits specified nodes to repo.
 

	
 
        :param user: Kallithea User object or user_id, the committer
 
        :param repo: Kallithea Repository object
 
        :param message: commit message
 
        :param nodes: mapping {filename:{'content':content},...}
 
        :param parent_cs: parent changeset; can be empty, in which case this is the initial commit
 
        :param author: author of commit; can be different from the committer, but only for git
 
        :param trigger_push_hook: trigger push hooks
 

	
 
        :returns: new committed changeset
 
        """
 

	
 
        user = User.guess_instance(user)
 
        scm_instance = repo.scm_instance_no_cache()
 

	
 
        processed_nodes = []
 
        for f_path in nodes:
 
            content = nodes[f_path]['content']
 
            f_path = self._sanitize_path(f_path)
 
            f_path = safe_str(f_path)
 
            # decoding here ensures that we have properly encoded values
 
            # in any other case this will throw exceptions and deny the commit
 
            if isinstance(content, (str,)):
 
                content = safe_str(content)
 
            else:
 
            if not isinstance(content, str) and not isinstance(content, bytes):
 
                content = content.read()
 
            processed_nodes.append((f_path, content))
 

	
 
        message = message
 
        committer = user.full_contact
 
        if not author:
 
            author = committer
 

	
 
        IMC = self._get_IMC_module(scm_instance.alias)
 
        imc = IMC(scm_instance)
 

	
 
        if not parent_cs:
 
            parent_cs = EmptyChangeset(alias=scm_instance.alias)
 

	
 
        if isinstance(parent_cs, EmptyChangeset):
 
            # EmptyChangeset means we're editing an empty repository
 
            parents = None
 
        else:
 
            parents = [parent_cs]
 
        # add multiple nodes
 
        for path, content in processed_nodes:
 
            imc.add(FileNode(path, content=content))
 

	
 
        tip = imc.commit(message=message,
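_sanitize_path above rejects any file path that is absolute, starts with a dot, or could escape the repository root, before create_nodes and commit_change write anything. A standalone sketch of the same check, raising ValueError instead of the project's NonRelativePathError:

import posixpath

def sanitize_repo_path(f_path):
    # Reject absolute or escaping paths, then normalize the rest.
    if f_path.startswith('/') or f_path.startswith('.') or '../' in f_path:
        raise ValueError('%s is not a relative path' % f_path)
    return posixpath.normpath(f_path) if f_path else f_path

# Example: prints docs/index.rst
print(sanitize_repo_path('docs/./index.rst'))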
kallithea/tests/base.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import datetime
 
import logging
 
import os
 
import re
 
import tempfile
 
import time
 

	
 
import pytest
 
from webtest import TestApp
 

	
 
from kallithea.lib.utils2 import ascii_str, safe_str
 
from kallithea.lib.utils2 import ascii_str
 
from kallithea.model.db import User
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 
skipif = pytest.mark.skipif
 
parametrize = pytest.mark.parametrize
 

	
 
# Hack: These module global values MUST be set to actual values before running any tests. This is currently done by conftest.py.
 
url = None
 
testapp = None
 

	
 
__all__ = [
 
    'skipif', 'parametrize', 'url', 'TestController',
 
    'ldap_lib_installed', 'pam_lib_installed', 'invalidate_all_caches',
 
    'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'NEW_HG_REPO', 'NEW_GIT_REPO',
 
    'HG_FORK', 'GIT_FORK', 'TEST_USER_ADMIN_LOGIN', 'TEST_USER_ADMIN_PASS',
 
    'TEST_USER_ADMIN_EMAIL', 'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS',
 
    'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN',
 
    'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'IP_ADDR',
 
    'TEST_HG_REPO', 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO',
 
    'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'HG_REMOTE_REPO',
 
    'GIT_REMOTE_REPO', 'HG_TEST_REVISION', 'GIT_TEST_REVISION',
 
]
 
@@ -159,37 +159,36 @@ class TestController(object):
 
        if b'Invalid username or password' in response.body:
 
            pytest.fail('could not login using %s %s' % (username, password))
 

	
 
        assert response.status == '302 Found'
 
        self.assert_authenticated_user(response, username)
 

	
 
        response = response.follow()
 
        return response.session['authuser']
 

	
 
    def _get_logged_user(self):
 
        return User.get_by_username(self._logged_username)
 

	
 
    def assert_authenticated_user(self, response, expected_username):
 
        cookie = response.session.get('authuser')
 
        user = cookie and cookie.get('user_id')
 
        user = user and User.get(user)
 
        user = user and user.username
 
        assert user == expected_username
 

	
 
    def session_csrf_secret_token(self):
 
        return ascii_str(self.app.get(url('session_csrf_secret_token')).body)
 

	
 
    def checkSessionFlash(self, response, msg=None, skip=0, _matcher=lambda msg, m: msg in m):
 
        if 'flash' not in response.session:
 
            pytest.fail(safe_str(u'msg `%s` not found - session has no flash:\n%s' % (msg, response)))
 
            pytest.fail(u'msg `%s` not found - session has no flash:\n%s' % (msg, response))
 
        try:
 
            level, m = response.session['flash'][-1 - skip]
 
            if _matcher(msg, m):
 
                return
 
        except IndexError:
 
            pass
 
        pytest.fail(safe_str(u'msg `%s` not found in session flash (skipping %s): %s' %
 
                           (msg, skip,
 
                            ', '.join('`%s`' % m for level, m in response.session['flash']))))
 
        pytest.fail(u'msg `%s` not found in session flash (skipping %s): %s' %
 
                    (msg, skip, ', '.join('`%s`' % m for level, m in response.session['flash'])))
 

	
 
    def checkSessionFlashRegex(self, response, regex, skip=0):
 
        self.checkSessionFlash(response, regex, skip=skip, _matcher=re.search)
kallithea/tests/functional/test_admin_repos.py
Show inline comments
 
# -*- coding: utf-8 -*-
 

	
 
import os
 
import urllib.parse
 

	
 
import mock
 
import pytest
 

	
 
from kallithea.lib import vcs
 
from kallithea.lib.utils2 import safe_str
 
from kallithea.model.db import Permission, RepoGroup, Repository, Ui, User, UserRepoToPerm
 
from kallithea.model.meta import Session
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.repo_group import RepoGroupModel
 
from kallithea.model.user import UserModel
 
from kallithea.tests import base
 
from kallithea.tests.fixture import Fixture, error_function
 

	
 

	
 
fixture = Fixture()
 

	
 

	
 
def _get_permission_for_user(user, repo):
 
    perm = UserRepoToPerm.query() \
 
                .filter(UserRepoToPerm.repository ==
 
                        Repository.get_by_repo_name(repo)) \
 
                .filter(UserRepoToPerm.user == User.get_by_username(user)) \
 
                .all()
 
    return perm
 

	
 

	
 
class _BaseTestCase(base.TestController):
 
    """
 
    Write all tests here
 
@@ -53,49 +52,49 @@ class _BaseTestCase(base.TestController)
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description,
 
                                                _session_csrf_secret_token=self.session_csrf_secret_token()))
 
        ## run the check page that triggers the flash message
 
        response = self.app.get(base.url('repo_check_home', repo_name=repo_name))
 
        assert response.json == {u'result': True}
 
        self.checkSessionFlash(response,
 
                               'Created repository <a href="/%s">%s</a>'
 
                               % (repo_name, repo_name))
 

	
 
        # test if the repo was created in the database
 
        new_repo = Session().query(Repository) \
 
            .filter(Repository.repo_name == repo_name).one()
 

	
 
        assert new_repo.repo_name == repo_name
 
        assert new_repo.description == description
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(base.url('summary_home', repo_name=repo_name))
 
        response.mustcontain(repo_name)
 
        response.mustcontain(self.REPO_TYPE)
 

	
 
        # test if the repository was created on filesystem
 
        try:
 
            vcs.get_repo(safe_str(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name)))
 
            vcs.get_repo(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name))
 
        except vcs.exceptions.VCSError:
 
            pytest.fail('no repo %s in filesystem' % repo_name)
 

	
 
        RepoModel().delete(repo_name)
 
        Session().commit()
 

	
 
    def test_case_insensitivity(self):
 
        self.log_user()
 
        repo_name = self.NEW_REPO
 
        description = u'description for newly created repo'
 
        response = self.app.post(base.url('repos'),
 
                                 fixture._get_repo_create_params(repo_private=False,
 
                                                                 repo_name=repo_name,
 
                                                                 repo_type=self.REPO_TYPE,
 
                                                                 repo_description=description,
 
                                                                 _session_csrf_secret_token=self.session_csrf_secret_token()))
 
        # try to create repo with swapped case
 
        swapped_repo_name = repo_name.swapcase()
 
        response = self.app.post(base.url('repos'),
 
                                 fixture._get_repo_create_params(repo_private=False,
 
                                                                 repo_name=swapped_repo_name,
 
                                                                 repo_type=self.REPO_TYPE,
 
                                                                 repo_description=description,
 
                                                                 _session_csrf_secret_token=self.session_csrf_secret_token()))
 
@@ -128,49 +127,49 @@ class _BaseTestCase(base.TestController)
 
        response = self.app.get(base.url('repo_check_home', repo_name=repo_name_full))
 
        assert response.json == {u'result': True}
 
        self.checkSessionFlash(response,
 
                               'Created repository <a href="/%s">%s</a>'
 
                               % (repo_name_full, repo_name_full))
 
        # test if the repo was created in the database
 
        new_repo = Session().query(Repository) \
 
            .filter(Repository.repo_name == repo_name_full).one()
 
        new_repo_id = new_repo.repo_id
 

	
 
        assert new_repo.repo_name == repo_name_full
 
        assert new_repo.description == description
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(base.url('summary_home', repo_name=repo_name_full))
 
        response.mustcontain(repo_name_full)
 
        response.mustcontain(self.REPO_TYPE)
 

	
 
        inherited_perms = UserRepoToPerm.query() \
 
            .filter(UserRepoToPerm.repository_id == new_repo_id).all()
 
        assert len(inherited_perms) == 1
 

	
 
        # test if the repository was created on filesystem
 
        try:
 
            vcs.get_repo(safe_str(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_full)))
 
            vcs.get_repo(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_full))
 
        except vcs.exceptions.VCSError:
 
            RepoGroupModel().delete(group_name)
 
            Session().commit()
 
            pytest.fail('no repo %s in filesystem' % repo_name)
 

	
 
        RepoModel().delete(repo_name_full)
 
        RepoGroupModel().delete(group_name)
 
        Session().commit()
 

	
 
    def test_create_in_group_without_needed_permissions(self):
 
        usr = self.log_user(base.TEST_USER_REGULAR_LOGIN, base.TEST_USER_REGULAR_PASS)
 
        # avoid spurious RepoGroup DetachedInstanceError ...
 
        session_csrf_secret_token = self.session_csrf_secret_token()
 
        # revoke
 
        user_model = UserModel()
 
        # disable fork and create on default user
 
        user_model.revoke_perm(User.DEFAULT_USER, 'hg.create.repository')
 
        user_model.grant_perm(User.DEFAULT_USER, 'hg.create.none')
 
        user_model.revoke_perm(User.DEFAULT_USER, 'hg.fork.repository')
 
        user_model.grant_perm(User.DEFAULT_USER, 'hg.fork.none')
 

	
 
        # disable on regular user
 
        user_model.revoke_perm(base.TEST_USER_REGULAR_LOGIN, 'hg.create.repository')
 
        user_model.grant_perm(base.TEST_USER_REGULAR_LOGIN, 'hg.create.none')
 
@@ -220,49 +219,49 @@ class _BaseTestCase(base.TestController)
 
        response = self.app.get(base.url('repo_check_home', repo_name=repo_name_full))
 
        assert response.json == {u'result': True}
 
        self.checkSessionFlash(response,
 
                               'Created repository <a href="/%s">%s</a>'
 
                               % (repo_name_full, repo_name_full))
 
        # test if the repo was created in the database
 
        new_repo = Session().query(Repository) \
 
            .filter(Repository.repo_name == repo_name_full).one()
 
        new_repo_id = new_repo.repo_id
 

	
 
        assert new_repo.repo_name == repo_name_full
 
        assert new_repo.description == description
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(base.url('summary_home', repo_name=repo_name_full))
 
        response.mustcontain(repo_name_full)
 
        response.mustcontain(self.REPO_TYPE)
 

	
 
        inherited_perms = UserRepoToPerm.query() \
 
            .filter(UserRepoToPerm.repository_id == new_repo_id).all()
 
        assert len(inherited_perms) == 1
 

	
 
        # test if the repository was created on filesystem
 
        try:
 
            vcs.get_repo(safe_str(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_full)))
 
            vcs.get_repo(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_full))
 
        except vcs.exceptions.VCSError:
 
            RepoGroupModel().delete(group_name)
 
            Session().commit()
 
            pytest.fail('no repo %s in filesystem' % repo_name)
 

	
 
        RepoModel().delete(repo_name_full)
 
        RepoGroupModel().delete(group_name)
 
        RepoGroupModel().delete(group_name_allowed)
 
        Session().commit()
 

	
 
    def test_create_in_group_inherit_permissions(self):
 
        self.log_user()
 

	
 
        ## create GROUP
 
        group_name = u'sometest_%s' % self.REPO_TYPE
 
        gr = RepoGroupModel().create(group_name=group_name,
 
                                     group_description=u'test',
 
                                     owner=base.TEST_USER_ADMIN_LOGIN)
 
        perm = Permission.get_by_key('repository.write')
 
        RepoGroupModel().grant_user_permission(gr, base.TEST_USER_REGULAR_LOGIN, perm)
 

	
 
        ## add repo permissions
 
        Session().commit()
 

	
 
@@ -277,49 +276,49 @@ class _BaseTestCase(base.TestController)
 
                                                repo_group=gr.group_id,
 
                                                repo_copy_permissions=True,
 
                                                _session_csrf_secret_token=self.session_csrf_secret_token()))
 

	
 
        ## run the check page that triggers the flash message
 
        response = self.app.get(base.url('repo_check_home', repo_name=repo_name_full))
 
        self.checkSessionFlash(response,
 
                               'Created repository <a href="/%s">%s</a>'
 
                               % (repo_name_full, repo_name_full))
 
        # test if the repo was created in the database
 
        new_repo = Session().query(Repository) \
 
            .filter(Repository.repo_name == repo_name_full).one()
 
        new_repo_id = new_repo.repo_id
 

	
 
        assert new_repo.repo_name == repo_name_full
 
        assert new_repo.description == description
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(base.url('summary_home', repo_name=repo_name_full))
 
        response.mustcontain(repo_name_full)
 
        response.mustcontain(self.REPO_TYPE)
 

	
 
        # test if the repository was created on filesystem
 
        try:
 
            vcs.get_repo(safe_str(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_full)))
 
            vcs.get_repo(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name_full))
 
        except vcs.exceptions.VCSError:
 
            RepoGroupModel().delete(group_name)
 
            Session().commit()
 
            pytest.fail('no repo %s in filesystem' % repo_name)
 

	
 
        # check if inherited permissions are applied
 
        inherited_perms = UserRepoToPerm.query() \
 
            .filter(UserRepoToPerm.repository_id == new_repo_id).all()
 
        assert len(inherited_perms) == 2
 

	
 
        assert base.TEST_USER_REGULAR_LOGIN in [x.user.username
 
                                                    for x in inherited_perms]
 
        assert 'repository.write' in [x.permission.permission_name
 
                                               for x in inherited_perms]
 

	
 
        RepoModel().delete(repo_name_full)
 
        RepoGroupModel().delete(group_name)
 
        Session().commit()
 

	
 
    def test_create_remote_repo_wrong_clone_uri(self):
 
        self.log_user()
 
        repo_name = self.NEW_REPO
 
        description = u'description for newly created repo'
 
        response = self.app.post(base.url('repos'),
 
@@ -352,71 +351,71 @@ class _BaseTestCase(base.TestController)
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_name=repo_name,
 
                                                repo_description=description,
 
                                                _session_csrf_secret_token=self.session_csrf_secret_token()))
 
        ## run the check page that triggers the flash message
 
        response = self.app.get(base.url('repo_check_home', repo_name=repo_name))
 
        self.checkSessionFlash(response,
 
                               'Created repository <a href="/%s">%s</a>'
 
                               % (repo_name, repo_name))
 
        # test if the repo was created in the database
 
        new_repo = Session().query(Repository) \
 
            .filter(Repository.repo_name == repo_name).one()
 

	
 
        assert new_repo.repo_name == repo_name
 
        assert new_repo.description == description
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(base.url('summary_home', repo_name=repo_name))
 
        response.mustcontain(repo_name)
 
        response.mustcontain(self.REPO_TYPE)
 

	
 
        # test if the repository was created on filesystem
 
        try:
 
            vcs.get_repo(safe_str(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name)))
 
            vcs.get_repo(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name))
 
        except vcs.exceptions.VCSError:
 
            pytest.fail('no repo %s in filesystem' % repo_name)
 

	
 
        response = self.app.post(base.url('delete_repo', repo_name=repo_name),
 
            params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 

	
 
        self.checkSessionFlash(response, 'Deleted repository %s' % (repo_name))
 

	
 
        response.follow()
 

	
 
        # check if repo was deleted from db
 
        deleted_repo = Session().query(Repository) \
 
            .filter(Repository.repo_name == repo_name).scalar()
 

	
 
        assert deleted_repo is None
 

	
 
        assert os.path.isdir(os.path.join(Ui.get_by_key('paths', '/').ui_value, repo_name)) == False
 

	
 
    def test_delete_non_ascii(self):
 
        self.log_user()
 
        non_ascii = "ąęł"
 
        repo_name = "%s%s" % (safe_str(self.NEW_REPO), non_ascii)
 
        repo_name = "%s%s" % (self.NEW_REPO, non_ascii)
 
        description = 'description for newly created repo' + non_ascii
 
        response = self.app.post(base.url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description,
 
                                                _session_csrf_secret_token=self.session_csrf_secret_token()))
 
        ## run the check page that triggers the flash message
 
        response = self.app.get(base.url('repo_check_home', repo_name=repo_name))
 
        assert response.json == {u'result': True}
 
        self.checkSessionFlash(response,
 
                               u'Created repository <a href="/%s">%s</a>'
 
                               % (urllib.parse.quote(repo_name), repo_name))
 
        # test if the repo was created in the database
 
        new_repo = Session().query(Repository) \
 
            .filter(Repository.repo_name == repo_name).one()
 

	
 
        assert new_repo.repo_name == repo_name
 
        assert new_repo.description == description
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(base.url('summary_home', repo_name=repo_name))
 
        response.mustcontain(repo_name)
 
        response.mustcontain(self.REPO_TYPE)
kallithea/tests/functional/test_forks.py
Show inline comments
 
# -*- coding: utf-8 -*-
 

	
 
import urllib.parse
 

	
 
from kallithea.lib.utils2 import safe_str
 
from kallithea.model.db import Repository, User
 
from kallithea.model.meta import Session
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.user import UserModel
 
from kallithea.tests import base
 
from kallithea.tests.fixture import Fixture
 

	
 

	
 
fixture = Fixture()
 

	
 

	
 
class _BaseTestCase(base.TestController):
 
    """
 
    Write all tests here
 
    """
 
    REPO = None
 
    REPO_TYPE = None
 
    NEW_REPO = None
 
    REPO_FORK = None
 

	
 
    def setup_method(self, method):
 
        self.username = u'forkuser'
 
        self.password = u'qweqwe'
 
        u1 = fixture.create_user(self.username, password=self.password, email=u'fork_king@example.com')
 
@@ -123,70 +122,70 @@ class _BaseTestCase(base.TestController)
 
                % (repo_name, fork_name_full, fork_name_full))
 

	
 
        # test if the fork was created in the database
 
        fork_repo = Session().query(Repository) \
 
            .filter(Repository.repo_name == fork_name_full).one()
 

	
 
        assert fork_repo.repo_name == fork_name_full
 
        assert fork_repo.fork.repo_name == repo_name
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(base.url('summary_home', repo_name=fork_name_full))
 
        response.mustcontain(fork_name_full)
 
        response.mustcontain(self.REPO_TYPE)
 
        response.mustcontain('Fork of "<a href="/%s">%s</a>"' % (repo_name, repo_name))
 

	
 
        fixture.destroy_repo(fork_name_full)
 
        fixture.destroy_repo_group(group_id)
 

	
 
    def test_fork_unicode(self):
 
        self.log_user()
 

	
 
        # create a fork
 
        repo_name = self.REPO
 
        org_repo = Repository.get_by_repo_name(repo_name)
 
        fork_name = safe_str(self.REPO_FORK + u'-rødgrød')
 
        fork_name = self.REPO_FORK + u'-rødgrød'
 
        creation_args = {
 
            'repo_name': fork_name,
 
            'repo_group': u'-1',
 
            'fork_parent_id': org_repo.repo_id,
 
            'repo_type': self.REPO_TYPE,
 
            'description': 'unicode repo 1',
 
            'private': 'False',
 
            'landing_rev': 'rev:tip',
 
            '_session_csrf_secret_token': self.session_csrf_secret_token()}
 
        self.app.post(base.url(controller='forks', action='fork_create',
 
                          repo_name=repo_name), creation_args)
 
        response = self.app.get(base.url(controller='forks', action='forks',
 
                                    repo_name=repo_name))
 
        response.mustcontain(
 
            """<a href="/%s">%s</a>""" % (urllib.parse.quote(fork_name), fork_name)
 
        )
 
        fork_repo = Repository.get_by_repo_name(fork_name)
 
        assert fork_repo
 

	
 
        # fork the fork
 
        fork_name_2 = safe_str(self.REPO_FORK + u'-blåbærgrød')
 
        fork_name_2 = self.REPO_FORK + u'-blåbærgrød'
 
        creation_args = {
 
            'repo_name': fork_name_2,
 
            'repo_group': u'-1',
 
            'fork_parent_id': fork_repo.repo_id,
 
            'repo_type': self.REPO_TYPE,
 
            'description': 'unicode repo 2',
 
            'private': 'False',
 
            'landing_rev': 'rev:tip',
 
            '_session_csrf_secret_token': self.session_csrf_secret_token()}
 
        self.app.post(base.url(controller='forks', action='fork_create',
 
                          repo_name=fork_name), creation_args)
 
        response = self.app.get(base.url(controller='forks', action='forks',
 
                                    repo_name=fork_name))
 
        response.mustcontain(
 
            """<a href="/%s">%s</a>""" % (urllib.parse.quote(fork_name_2), fork_name_2)
 
        )
 

	
 
        # remove these forks
 
        response = self.app.post(base.url('delete_repo', repo_name=fork_name_2),
 
            params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 
        response = self.app.post(base.url('delete_repo', repo_name=fork_name),
 
            params={'_session_csrf_secret_token': self.session_csrf_secret_token()})
 

	
 
    def test_fork_create_and_permissions(self):
kallithea/tests/vcs/test_hg.py
Show inline comments
 
import os
 

	
 
import mock
 
import pytest
 

	
 
from kallithea.lib.vcs.backends.hg import MercurialChangeset, MercurialRepository
 
from kallithea.lib.vcs.exceptions import NodeDoesNotExistError, RepositoryError, VCSError
 
from kallithea.lib.vcs.nodes import NodeKind, NodeState
 
from kallithea.lib.vcs.utils import safe_str
 
from kallithea.tests.vcs.conf import TEST_HG_REPO, TEST_HG_REPO_CLONE, TEST_HG_REPO_PULL, TESTS_TMP_PATH
 

	
 

	
 
class TestMercurialRepository(object):
 

	
 
    def __check_for_existing_repo(self):
 
        if os.path.exists(TEST_HG_REPO_CLONE):
 
            pytest.fail('Cannot test mercurial clone repo as location %s already '
 
                      'exists. You should manually remove it first.'
 
                      % TEST_HG_REPO_CLONE)
 

	
 
    def setup_method(self):
 
        self.repo = MercurialRepository(safe_str(TEST_HG_REPO))
 
        self.repo = MercurialRepository(TEST_HG_REPO)
 

	
 
    def test_wrong_repo_path(self):
 
        wrong_repo_path = os.path.join(TESTS_TMP_PATH, 'errorrepo')
 
        with pytest.raises(RepositoryError):
 
            MercurialRepository(wrong_repo_path)
 

	
 
    def test_unicode_path_repo(self):
 
        with pytest.raises(VCSError):
 
            MercurialRepository(u'iShouldFail')
 

	
 
    def test_repo_clone(self):
 
        self.__check_for_existing_repo()
 
        repo = MercurialRepository(safe_str(TEST_HG_REPO))
 
        repo = MercurialRepository(TEST_HG_REPO)
 
        repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
 
            src_url=TEST_HG_REPO, update_after_clone=True)
 
        assert len(repo.revisions) == len(repo_clone.revisions)
 
        # Checking hashes of changesets should be enough
 
        for changeset in repo.get_changesets():
 
            raw_id = changeset.raw_id
 
            assert raw_id == repo_clone.get_changeset(raw_id).raw_id
 

	
 
    def test_repo_clone_with_update(self):
 
        repo = MercurialRepository(safe_str(TEST_HG_REPO))
 
        repo = MercurialRepository(TEST_HG_REPO)
 
        repo_clone = MercurialRepository(TEST_HG_REPO_CLONE + '_w_update',
 
            src_url=TEST_HG_REPO, update_after_clone=True)
 
        assert len(repo.revisions) == len(repo_clone.revisions)
 

	
 
        # check if current workdir was updated
 
        assert os.path.isfile(
 
            os.path.join(
 
                TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'
 
            )
 
        )
 

	
 
    def test_repo_clone_without_update(self):
 
        repo = MercurialRepository(safe_str(TEST_HG_REPO))
 
        repo = MercurialRepository(TEST_HG_REPO)
 
        repo_clone = MercurialRepository(TEST_HG_REPO_CLONE + '_wo_update',
 
            src_url=TEST_HG_REPO, update_after_clone=False)
 
        assert len(repo.revisions) == len(repo_clone.revisions)
 
        assert not os.path.isfile(
 
            os.path.join(
 
                TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'
 
            )
 
        )
 

	
 
    def test_pull(self):
 
        if os.path.exists(TEST_HG_REPO_PULL):
 
            pytest.fail('Cannot test mercurial pull command as location %s '
 
                      'already exists. You should manually remove it first'
 
                      % TEST_HG_REPO_PULL)
 
        repo_new = MercurialRepository(TEST_HG_REPO_PULL, create=True)
 
        assert len(self.repo.revisions) > len(repo_new.revisions)
 

	
 
        repo_new.pull(self.repo.path)
 
        repo_new = MercurialRepository(TEST_HG_REPO_PULL)
 
        assert len(self.repo.revisions) == len(repo_new.revisions)
 

	
 
    def test_revisions(self):
 
        # there are 21 revisions at bitbucket now
 
        # so we can assume they would be available from now on
 
@@ -235,49 +234,49 @@ TODO: To be written...
 
        assert node.kind == NodeKind.FILE
 
        assert node.content == readme
 

	
 
    @mock.patch('mercurial.mdiff.diffopts')
 
    def test_get_diff_does_not_sanitize_zero_context(self, mock_diffopts):
 
        zero_context = 0
 

	
 
        self.repo.get_diff(0, 1, 'foo', context=zero_context)
 

	
 
        mock_diffopts.assert_called_once_with(git=True, showfunc=True, ignorews=False, context=zero_context)
 

	
 
    @mock.patch('mercurial.mdiff.diffopts')
 
    def test_get_diff_sanitizes_negative_context(self, mock_diffopts):
 
        negative_context = -10
 
        zero_context = 0
 

	
 
        self.repo.get_diff(0, 1, 'foo', context=negative_context)
 

	
 
        mock_diffopts.assert_called_once_with(git=True, showfunc=True, ignorews=False, context=zero_context)
 

	
 

	
 
class TestMercurialChangeset(object):
 

	
 
    def setup_method(self):
 
        self.repo = MercurialRepository(safe_str(TEST_HG_REPO))
 
        self.repo = MercurialRepository(TEST_HG_REPO)
 

	
 
    def _test_equality(self, changeset):
 
        revision = changeset.revision
 
        assert changeset == self.repo.get_changeset(revision)
 

	
 
    def test_equality(self):
 
        revs = [0, 10, 20]
 
        changesets = [self.repo.get_changeset(rev) for rev in revs]
 
        for changeset in changesets:
 
            self._test_equality(changeset)
 

	
 
    def test_default_changeset(self):
 
        tip = self.repo.get_changeset('tip')
 
        assert tip == self.repo.get_changeset()
 
        assert tip == self.repo.get_changeset(revision=None)
 
        assert tip == list(self.repo[-1:])[0]
 

	
 
    def test_root_node(self):
 
        tip = self.repo.get_changeset('tip')
 
        assert tip.root is tip.get_node('')
 

	
 
    def test_lazy_fetch(self):
 
        """
 
        Test if changeset's nodes expands and are cached as we walk through
kallithea/tests/vcs/test_vcs.py
Show inline comments
 
import os
 
import shutil
 

	
 
import pytest
 

	
 
from kallithea.lib.vcs import VCSError, get_backend, get_repo
 
from kallithea.lib.vcs.backends.hg import MercurialRepository
 
from kallithea.lib.vcs.utils import safe_str
 
from kallithea.tests.vcs.conf import TEST_GIT_REPO, TEST_HG_REPO, TESTS_TMP_PATH
 

	
 

	
 
class TestVCS(object):
 
    """
 
    Tests for main module's methods.
 
    """
 

	
 
    def test_get_backend(self):
 
        hg = get_backend('hg')
 
        assert hg == MercurialRepository
 

	
 
    def test_alias_detect_hg(self):
 
        alias = 'hg'
 
        path = TEST_HG_REPO
 
        backend = get_backend(alias)
 
        repo = backend(safe_str(path))
 
        repo = backend(path)
 
        assert 'hg' == repo.alias
 

	
 
    def test_alias_detect_git(self):
 
        alias = 'git'
 
        path = TEST_GIT_REPO
 
        backend = get_backend(alias)
 
        repo = backend(safe_str(path))
 
        repo = backend(path)
 
        assert 'git' == repo.alias
 

	
 
    def test_wrong_alias(self):
 
        alias = 'wrong_alias'
 
        with pytest.raises(VCSError):
 
            get_backend(alias)
 

	
 
    def test_get_repo(self):
 
        alias = 'hg'
 
        path = TEST_HG_REPO
 
        backend = get_backend(alias)
 
        repo = backend(safe_str(path))
 
        repo = backend(path)
 

	
 
        assert repo.__class__ == get_repo(safe_str(path), alias).__class__
 
        assert repo.path == get_repo(safe_str(path), alias).path
 
        assert repo.__class__ == get_repo(path, alias).__class__
 
        assert repo.path == get_repo(path, alias).path
 

	
 
    def test_get_repo_autoalias_hg(self):
 
        alias = 'hg'
 
        path = TEST_HG_REPO
 
        backend = get_backend(alias)
 
        repo = backend(safe_str(path))
 
        repo = backend(path)
 

	
 
        assert repo.__class__ == get_repo(safe_str(path)).__class__
 
        assert repo.path == get_repo(safe_str(path)).path
 
        assert repo.__class__ == get_repo(path).__class__
 
        assert repo.path == get_repo(path).path
 

	
 
    def test_get_repo_autoalias_git(self):
 
        alias = 'git'
 
        path = TEST_GIT_REPO
 
        backend = get_backend(alias)
 
        repo = backend(safe_str(path))
 
        repo = backend(path)
 

	
 
        assert repo.__class__ == get_repo(safe_str(path)).__class__
 
        assert repo.path == get_repo(safe_str(path)).path
 
        assert repo.__class__ == get_repo(path).__class__
 
        assert repo.path == get_repo(path).path
 

	
 
    def test_get_repo_err(self):
 
        blank_repo_path = os.path.join(TESTS_TMP_PATH, 'blank-error-repo')
 
        if os.path.isdir(blank_repo_path):
 
            shutil.rmtree(blank_repo_path)
 

	
 
        os.mkdir(blank_repo_path)
 
        with pytest.raises(VCSError):
 
            get_repo(blank_repo_path)
 
        with pytest.raises(VCSError):
 
            get_repo(blank_repo_path + 'non_existing')
 

	
 
    def test_get_repo_multialias(self):
 
        multialias_repo_path = os.path.join(TESTS_TMP_PATH, 'hg-git-repo')
 
        if os.path.isdir(multialias_repo_path):
 
            shutil.rmtree(multialias_repo_path)
 

	
 
        os.mkdir(multialias_repo_path)
 

	
 
        os.mkdir(os.path.join(multialias_repo_path, '.git'))
 
        os.mkdir(os.path.join(multialias_repo_path, '.hg'))
 
        with pytest.raises(VCSError):
 
            get_repo(multialias_repo_path)