Changeset - 5b1f43027662
branch: default
Mads Kiilerich <mads@kiilerich.com> - 2020-03-10 15:08:46
scripts: drop isort --wrap-length 160 - it is broken with py3 and not really necessary

Under Python 3, isort 4.3.21 failed with
https://github.com/timothycrosley/isort/issues/741 on
kallithea/controllers/api/api.py:

Traceback (most recent call last):
  File "data/env/bin/isort", line 10, in <module>
    sys.exit(main())
  File ".../env/lib64/python3.7/site-packages/isort/main.py", line 379, in main
    for sort_attempt in attempt_iterator:
  File ".../env/lib64/python3.7/site-packages/isort/main.py", line 377, in <genexpr>
    attempt_iterator = (sort_imports(file_name, **arguments) for file_name in file_names)
  File ".../env/lib64/python3.7/site-packages/isort/main.py", line 88, in sort_imports
    result = SortImports(file_name, **arguments)
  File ".../env/lib64/python3.7/site-packages/isort/isort.py", line 207, in __init__
    self._add_formatted_imports()
  File ".../env/lib64/python3.7/site-packages/isort/isort.py", line 606, in _add_formatted_imports
    self._add_from_imports(from_modules, section, section_output, sort_ignore_case)
  File ".../env/lib64/python3.7/site-packages/isort/isort.py", line 526, in _add_from_imports
    import_statement = self._multi_line_reformat(import_start, from_import_section, comments)
  File ".../env/lib64/python3.7/site-packages/isort/isort.py", line 552, in _multi_line_reformat
    dynamic_indent, indent, line_length, comments)
  File ".../env/lib64/python3.7/site-packages/isort/isort.py", line 705, in _output_grid
    if len(next_statement.split(self.line_separator)[-1]) + 1 > line_length:
TypeError: '>' not supported between instances of 'int' and 'str'
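
The failing comparison is an ordinary Python 3 type error: the left-hand side, len(...) + 1, is an int, while line_length evidently arrives as a str, and Python 3 (unlike Python 2) refuses ordering comparisons between unrelated types. A minimal illustration, not taken from isort itself:

  >>> 160 > "160"
  Traceback (most recent call last):
    ...
  TypeError: '>' not supported between instances of 'int' and 'str'
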
15 files changed with 29 insertions and 29 deletions:
kallithea/controllers/api/api.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.api.api
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
API controller for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Aug 20, 2011
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 
import traceback
 
from datetime import datetime
 

	
 
from tg import request
 

	
 
from kallithea.controllers.api import JSONRPCController, JSONRPCError
 
from kallithea.lib.auth import (
 
    AuthUser, HasPermissionAny, HasPermissionAnyDecorator, HasRepoGroupPermissionLevel, HasRepoPermissionLevel, HasUserGroupPermissionLevel)
 
from kallithea.lib.auth import (AuthUser, HasPermissionAny, HasPermissionAnyDecorator, HasRepoGroupPermissionLevel, HasRepoPermissionLevel,
 
                                HasUserGroupPermissionLevel)
 
from kallithea.lib.exceptions import DefaultUserException, UserGroupsAssignedException
 
from kallithea.lib.utils import action_logger, repo2db_mapper
 
from kallithea.lib.utils2 import OAttr, Optional
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 
from kallithea.lib.vcs.exceptions import EmptyRepositoryError
 
from kallithea.model.changeset_status import ChangesetStatusModel
 
from kallithea.model.comment import ChangesetCommentsModel
 
from kallithea.model.db import ChangesetStatus, Gist, Permission, PullRequest, RepoGroup, Repository, Setting, User, UserGroup, UserIpMap
 
from kallithea.model.gist import GistModel
 
from kallithea.model.meta import Session
 
from kallithea.model.pull_request import PullRequestModel
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.repo_group import RepoGroupModel
 
from kallithea.model.scm import ScmModel, UserGroupList
 
from kallithea.model.user import UserModel
 
from kallithea.model.user_group import UserGroupModel
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def store_update(updates, attr, name):
 
    """
 
    Stores param in updates dict if it's not instance of Optional
 
    allows easy updates of passed in params
 
    """
 
    if not isinstance(attr, Optional):
 
        updates[name] = attr
 

	
 

	
 
def get_user_or_error(userid):
 
    """
 
    Get user by id or name, or raise JSONRPCError if not found
 

	
 
    :param userid:
 
    """
 
    user = UserModel().get_user(userid)
 
    if user is None:
 
        raise JSONRPCError("user `%s` does not exist" % (userid,))
 
    return user
 

	
 

	
 
def get_repo_or_error(repoid):
 
    """
 
    Get repo by id or name, or raise JSONRPCError if not found
 

	
 
    :param repoid:
 
    """
 
    repo = RepoModel().get_repo(repoid)
 
    if repo is None:
 
        raise JSONRPCError('repository `%s` does not exist' % (repoid,))
 
    return repo
 

	
 

	
 
def get_repo_group_or_error(repogroupid):
 
    """
 
    Get repo group by id or name, or raise JSONRPCError if not found
 

	
 
    :param repogroupid:
 
    """
 
    repo_group = RepoGroup.guess_instance(repogroupid)
 
    if repo_group is None:
 
        raise JSONRPCError(
 
            'repository group `%s` does not exist' % (repogroupid,))
 
    return repo_group
 

	
 

	
 
def get_user_group_or_error(usergroupid):
 
    """
 
    Get user group by id or name, or raise JSONRPCError if not found
 

	
 
    :param usergroupid:
 
    """
 
    user_group = UserGroupModel().get_group(usergroupid)
 
    if user_group is None:
 
        raise JSONRPCError('user group `%s` does not exist' % (usergroupid,))
 
    return user_group
 

	
 

	
 
def get_perm_or_error(permid, prefix=None):
 
    """
 
    Get permission by id or name, or raise JSONRPCError if not found
 

	
 
    :param permid:
 
    """
 
    perm = Permission.get_by_key(permid)
 
    if perm is None:
 
        raise JSONRPCError('permission `%s` does not exist' % (permid,))
 
    if prefix:
 
        if not perm.permission_name.startswith(prefix):
 
            raise JSONRPCError('permission `%s` is invalid, '
 
                               'should start with %s' % (permid, prefix))
 
    return perm
 

	
 

	
 
def get_gist_or_error(gistid):
 
    """
 
    Get gist by id or gist_access_id, or raise JSONRPCError if not found
 

	
 
    :param gistid:
 
    """
 
    gist = GistModel().get_gist(gistid)
 
    if gist is None:
 
        raise JSONRPCError('gist `%s` does not exist' % (gistid,))
 
    return gist
 

	
 

	
 
class ApiController(JSONRPCController):
 
    """
 
    API Controller
 

	
 
    The authenticated user can be found as request.authuser.
 

	
 
    Example function::
 

	
 
        def func(arg1, arg2,...):
 
            pass
 

	
 
    Each function should also **raise** JSONRPCError for any
 
    errors that happen.
 
    """
 

	
 
    @HasPermissionAnyDecorator('hg.admin')
 
    def test(self, args):
 
        return args
 

	
 
    @HasPermissionAnyDecorator('hg.admin')
 
    def pull(self, repoid, clone_uri=Optional(None)):
 
        """
 
        Triggers a pull from remote location on given repo. Can be used to
 
        automatically keep remote repos up to date. This command can be executed
 
        only using api_key belonging to user with admin rights
 

	
 
        :param repoid: repository name or repository id
 
        :type repoid: str or int
 
        :param clone_uri: repository URI to pull from (optional)
 
        :type clone_uri: str
 

	
 
        OUTPUT::
 

	
 
          id : <id_given_in_input>
 
          result : {
 
            "msg": "Pulled from `<repository name>`"
 
            "repository": "<repository name>"
 
          }
 
          error :  null
 

	
 
        ERROR OUTPUT::
 

	
 
          id : <id_given_in_input>
 
          result : null
 
          error :  {
 
            "Unable to pull changes from `<reponame>`"
 
          }
 

	
 
        """
 

	
 
        repo = get_repo_or_error(repoid)
 

	
 
        try:
 
            ScmModel().pull_changes(repo.repo_name,
 
                                    request.authuser.username,
 
                                    request.ip_addr,
 
                                    clone_uri=Optional.extract(clone_uri))
 
            return dict(
 
                msg='Pulled from `%s`' % repo.repo_name,
 
                repository=repo.repo_name
 
            )
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise JSONRPCError(
 
                'Unable to pull changes from `%s`' % repo.repo_name
 
            )
 

	
 
    @HasPermissionAnyDecorator('hg.admin')
 
    def rescan_repos(self, remove_obsolete=Optional(False)):
 
        """
 
        Triggers a rescan of all repositories. If remove_obsolete is set,

        repositories that are in the database but not on the filesystem are

        also deleted ("clean zombies"). This command can be executed only

        using an api_key belonging to a user with admin rights.
 

	
 
        :param remove_obsolete: deletes repositories from
 
            database that are not found on the filesystem
 
        :type remove_obsolete: Optional(bool)
 

	
 
        OUTPUT::
 

	
 
          id : <id_given_in_input>
 
          result : {
 
            'added': [<added repository name>,...]
 
            'removed': [<removed repository name>,...]
 
          }
 
          error :  null
 

	
 
        ERROR OUTPUT::
 

	
 
          id : <id_given_in_input>
 
          result : null
 
          error :  {
 
            'Error occurred during rescan repositories action'
 
          }
 

	
 
        """
 

	
 
        try:
 
            rm_obsolete = Optional.extract(remove_obsolete)
 
            added, removed = repo2db_mapper(ScmModel().repo_scan(),
 
                                            remove_obsolete=rm_obsolete)
 
            return {'added': added, 'removed': removed}
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise JSONRPCError(
 
                'Error occurred during rescan repositories action'
 
            )
 

	
 
    def invalidate_cache(self, repoid):
 
        """
 
        Invalidate cache for repository.
 
        This command can be executed only using an api_key belonging to a user with admin

        rights, or by a regular user that has write or admin access to the repository.
 

	
 
        :param repoid: repository name or repository id
 
        :type repoid: str or int
 

	
 
        OUTPUT::
 

	
 
          id : <id_given_in_input>
 
          result : {
 
            'msg': Cache for repository `<repository name>` was invalidated,
 
            'repository': <repository name>
 
          }
 
          error :  null
 

	
 
        ERROR OUTPUT::
 

	
 
          id : <id_given_in_input>
 
          result : null
 
          error :  {
 
            'Error occurred during cache invalidation action'
 
          }
 

	
 
        """
 
        repo = get_repo_or_error(repoid)
 
        if not HasPermissionAny('hg.admin')():
 
            if not HasRepoPermissionLevel('write')(repo.repo_name):
 
                raise JSONRPCError('repository `%s` does not exist' % (repoid,))
 

	
 
        try:
 
            ScmModel().mark_for_invalidation(repo.repo_name)
 
            return dict(
 
                msg='Cache for repository `%s` was invalidated' % (repoid,),
 
                repository=repo.repo_name
 
            )
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise JSONRPCError(
 
                'Error occurred during cache invalidation action'
 
            )
 

	
 
    @HasPermissionAnyDecorator('hg.admin')
 
    def get_ip(self, userid=Optional(OAttr('apiuser'))):
 
        """
 
        Shows the IP address as seen from the Kallithea server, together with all

        defined IP addresses for the given user. If userid is not passed, data is

        returned for the user calling this function.

        This command can be executed only using an api_key belonging to a user with

        admin rights.
 

	
 
        :param userid: username to show ips for
 
        :type userid: Optional(str or int)
 

	
 
        OUTPUT::
 

	
 
            id : <id_given_in_input>
 
            result : {
 
                         "server_ip_addr": "<ip_from_clien>",
 
                         "user_ips": [
 
                                        {
 
                                           "ip_addr": "<ip_with_mask>",
 
                                           "ip_range": ["<start_ip>", "<end_ip>"],
 
                                        },
 
                                        ...
 
                                     ]
 
            }
 

	
 
        """
 
        if isinstance(userid, Optional):
 
            userid = request.authuser.user_id
 
        user = get_user_or_error(userid)
 
        ips = UserIpMap.query().filter(UserIpMap.user == user).all()
 
        return dict(
 
            server_ip_addr=request.ip_addr,
 
            user_ips=ips
 
        )
 

	
 
    # alias for old
 
    show_ip = get_ip
 

	
 
    @HasPermissionAnyDecorator('hg.admin')
 
    def get_server_info(self):
 
        """
 
        return server info, including Kallithea version and installed packages
 

	
 

	
 
        OUTPUT::
 

	
 
          id : <id_given_in_input>
 
          result : {
 
            'modules': [<module name>,...]
 
            'py_version': <python version>,
 
            'platform': <platform type>,
 
            'kallithea_version': <kallithea version>
 
          }
 
          error :  null
 
        """
 
        return Setting.get_server_info()
 

	
 
    def get_user(self, userid=Optional(OAttr('apiuser'))):
 
        """
 
        Gets a user by username or user_id. Returns an empty result if the user is

        not found. If the userid param is skipped, it is set to the id of the user

        calling this method. This command can be executed only using an api_key

        belonging to a user with admin rights, or by regular users that cannot

        specify a userid other than their own.
 

	
 
        :param userid: user to get data for
 
        :type userid: Optional(str or int)
 

	
 
        OUTPUT::
 

	
 
            id : <id_given_in_input>
 
            result: None if user does not exist or
 
                    {
 
                        "user_id" :     "<user_id>",
 
                        "api_key" :     "<api_key>",
 
                        "api_keys":     "[<list of all API keys including additional ones>]"
 
                        "username" :    "<username>",
 
                        "firstname":    "<firstname>",
 
                        "lastname" :    "<lastname>",
 
                        "email" :       "<email>",
 
                        "emails":       "[<list of all emails including additional ones>]",
 
                        "ip_addresses": "[<ip_address_for_user>,...]",
 
                        "active" :      "<bool: user active>",
 
                        "admin" :       "<bool: user is admin>",
 
                        "extern_name" : "<extern_name>",
 
                        "extern_type" : "<extern type>
 
                        "last_login":   "<last_login>",
 
                        "permissions": {
 
                            "global": ["hg.create.repository",
 
                                       "repository.read",
 
                                       "hg.register.manual_activate"],
 
                            "repositories": {"repo1": "repository.none"},
 
                            "repositories_groups": {"Group1": "group.read"}
 
                         },
 
                    }
 

	
 
            error:  null
 

	
 
        """
 
        if not HasPermissionAny('hg.admin')():
 
            # make sure a normal user does not pass someone else's userid;

            # they are not allowed to do that
 
            if not isinstance(userid, Optional) and userid != request.authuser.user_id:
 
                raise JSONRPCError(
 
                    'userid is not the same as your user'
 
                )
 

	
 
        if isinstance(userid, Optional):
 
            userid = request.authuser.user_id
 

	
 
        user = get_user_or_error(userid)
 
        data = user.get_api_data()
 
        data['permissions'] = AuthUser(user_id=user.user_id).permissions
 
        return data
 

	
 
    @HasPermissionAnyDecorator('hg.admin')
 
    def get_users(self):
 
        """
 
        Lists all existing users. This command can be executed only using api_key
 
        belonging to user with admin rights.
 

	
 

	
 
        OUTPUT::
kallithea/controllers/files.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.files
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Files controller for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 21, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 
import os
 
import posixpath
 
import shutil
 
import tempfile
 
import traceback
 
from collections import OrderedDict
 

	
 
from tg import request, response
 
from tg import tmpl_context as c
 
from tg.i18n import ugettext as _
 
from webob.exc import HTTPFound, HTTPNotFound
 

	
 
from kallithea.config.routing import url
 
from kallithea.controllers.changeset import _context_url, _ignorews_url, anchor_url, get_ignore_ws, get_line_ctx
 
from kallithea.lib import diffs
 
from kallithea.lib import helpers as h
 
from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired
 
from kallithea.lib.base import BaseRepoController, jsonify, render
 
from kallithea.lib.exceptions import NonRelativePathError
 
from kallithea.lib.utils import action_logger
 
from kallithea.lib.utils2 import convert_line_endings, detect_mode, safe_int, safe_str, str2bool
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 
from kallithea.lib.vcs.conf import settings
 
from kallithea.lib.vcs.exceptions import (
 
    ChangesetDoesNotExistError, ChangesetError, EmptyRepositoryError, ImproperArchiveTypeError, NodeAlreadyExistsError, NodeDoesNotExistError, NodeError, RepositoryError, VCSError)
 
from kallithea.lib.vcs.exceptions import (ChangesetDoesNotExistError, ChangesetError, EmptyRepositoryError, ImproperArchiveTypeError, NodeAlreadyExistsError,
 
                                          NodeDoesNotExistError, NodeError, RepositoryError, VCSError)
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.model.db import Repository
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.scm import ScmModel
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class FilesController(BaseRepoController):
 

	
 
    def _before(self, *args, **kwargs):
 
        super(FilesController, self)._before(*args, **kwargs)
 

	
 
    def __get_cs(self, rev, silent_empty=False):
 
        """
 
        Safe way to get a changeset: if an error occurs, it redirects to tip with

        a proper message
 

	
 
        :param rev: revision to fetch
 
        :param silent_empty: return None if repository is empty
 
        """
 

	
 
        try:
 
            return c.db_repo_scm_instance.get_changeset(rev)
 
        except EmptyRepositoryError as e:
 
            if silent_empty:
 
                return None
 
            url_ = url('files_add_home',
 
                       repo_name=c.repo_name,
 
                       revision=0, f_path='', anchor='edit')
 
            add_new = h.link_to(_('Click here to add new file'), url_, class_="alert-link")
 
            h.flash(_('There are no files yet.') + ' ' + add_new, category='warning')
 
            raise HTTPNotFound()
 
        except (ChangesetDoesNotExistError, LookupError):
 
            msg = _('Such revision does not exist for this repository')
 
            h.flash(msg, category='error')
 
            raise HTTPNotFound()
 
        except RepositoryError as e:
 
            h.flash(e, category='error')
 
            raise HTTPNotFound()
 

	
 
    def __get_filenode(self, cs, path):
 
        """
 
        Returns file_node or raise HTTP error.
 

	
 
        :param cs: given changeset
 
        :param path: path to lookup
 
        """
 

	
 
        try:
 
            file_node = cs.get_node(path)
 
            if file_node.is_dir():
 
                raise RepositoryError('given path is a directory')
 
        except ChangesetDoesNotExistError:
 
            msg = _('Such revision does not exist for this repository')
 
            h.flash(msg, category='error')
 
            raise HTTPNotFound()
 
        except RepositoryError as e:
 
            h.flash(e, category='error')
 
            raise HTTPNotFound()
 

	
 
        return file_node
 

	
 
    @LoginRequired(allow_default_user=True)
 
    @HasRepoPermissionLevelDecorator('read')
 
    def index(self, repo_name, revision, f_path, annotate=False):
 
        # redirect to given revision from form if given
 
        post_revision = request.POST.get('at_rev', None)
 
        if post_revision:
 
            cs = self.__get_cs(post_revision) # FIXME - unused!
 

	
 
        c.revision = revision
 
        c.changeset = self.__get_cs(revision)
 
        c.branch = request.GET.get('branch', None)
 
        c.f_path = f_path
 
        c.annotate = annotate
 
        cur_rev = c.changeset.revision
 
        # used in files_source.html:
 
        c.cut_off_limit = self.cut_off_limit
 
        c.fulldiff = request.GET.get('fulldiff')
 

	
 
        # prev link
 
        try:
 
            prev_rev = c.db_repo_scm_instance.get_changeset(cur_rev).prev(c.branch)
 
            c.url_prev = url('files_home', repo_name=c.repo_name,
 
                         revision=prev_rev.raw_id, f_path=f_path)
 
            if c.branch:
 
                c.url_prev += '?branch=%s' % c.branch
 
        except (ChangesetDoesNotExistError, VCSError):
 
            c.url_prev = '#'
 

	
 
        # next link
 
        try:
 
            next_rev = c.db_repo_scm_instance.get_changeset(cur_rev).next(c.branch)
 
            c.url_next = url('files_home', repo_name=c.repo_name,
 
                     revision=next_rev.raw_id, f_path=f_path)
 
            if c.branch:
 
                c.url_next += '?branch=%s' % c.branch
 
        except (ChangesetDoesNotExistError, VCSError):
 
            c.url_next = '#'
 

	
 
        # files or dirs
 
        try:
 
            c.file = c.changeset.get_node(f_path)
 

	
 
            if c.file.is_submodule():
 
                raise HTTPFound(location=c.file.url)
 
            elif c.file.is_file():
 
                c.load_full_history = False
 
                # determine if we're on branch head
 
                _branches = c.db_repo_scm_instance.branches
 
                c.on_branch_head = revision in _branches or revision in _branches.values()
 
                _hist = []
 
                c.file_history = []
 
                if c.load_full_history:
 
                    c.file_history, _hist = self._get_node_history(c.changeset, f_path)
 

	
 
                c.authors = []
 
                for a in set([x.author for x in _hist]):
 
                    c.authors.append((h.email(a), h.person(a)))
 
            else:
 
                c.authors = c.file_history = []
 
        except RepositoryError as e:
 
            h.flash(e, category='error')
 
            raise HTTPNotFound()
 

	
 
        if request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            return render('files/files_ypjax.html')
 

	
 
        # TODO: tags and bookmarks?
 
        c.revision_options = [(c.changeset.raw_id,
 
                              _('%s at %s') % (b, h.short_id(c.changeset.raw_id))) for b in c.changeset.branches] + \
 
            [(n, b) for b, n in c.db_repo_scm_instance.branches.items()]
 
        if c.db_repo_scm_instance.closed_branches:
 
            prefix = _('(closed)') + ' '
 
            c.revision_options += [('-', '-')] + \
 
                [(n, prefix + b) for b, n in c.db_repo_scm_instance.closed_branches.items()]
 

	
 
        return render('files/files.html')
 

	
 
    @LoginRequired(allow_default_user=True)
 
    @HasRepoPermissionLevelDecorator('read')
 
    @jsonify
 
    def history(self, repo_name, revision, f_path):
 
        changeset = self.__get_cs(revision)
 
        _file = changeset.get_node(f_path)
 
        if _file.is_file():
 
            file_history, _hist = self._get_node_history(changeset, f_path)
 

	
 
            res = []
 
            for obj in file_history:
 
                res.append({
 
                    'text': obj[1],
 
                    'children': [{'id': o[0], 'text': o[1]} for o in obj[0]]
 
                })
 

	
 
            data = {
 
                'more': False,
 
                'results': res
 
            }
 
            return data
 

	
 
    @LoginRequired(allow_default_user=True)
 
    @HasRepoPermissionLevelDecorator('read')
 
    def authors(self, repo_name, revision, f_path):
 
        changeset = self.__get_cs(revision)
 
        _file = changeset.get_node(f_path)
 
        if _file.is_file():
 
            file_history, _hist = self._get_node_history(changeset, f_path)
 
            c.authors = []
 
            for a in set([x.author for x in _hist]):
 
                c.authors.append((h.email(a), h.person(a)))
 
            return render('files/files_history_box.html')
 

	
 
    @LoginRequired(allow_default_user=True)
 
    @HasRepoPermissionLevelDecorator('read')
 
    def rawfile(self, repo_name, revision, f_path):
 
        cs = self.__get_cs(revision)
 
        file_node = self.__get_filenode(cs, f_path)
 

	
 
        response.content_disposition = \
 
            'attachment; filename=%s' % f_path.split(Repository.url_sep())[-1]
 

	
 
        response.content_type = file_node.mimetype
 
        return file_node.content
 

	
 
    @LoginRequired(allow_default_user=True)
 
    @HasRepoPermissionLevelDecorator('read')
 
    def raw(self, repo_name, revision, f_path):
 
        cs = self.__get_cs(revision)
 
        file_node = self.__get_filenode(cs, f_path)
 

	
 
        raw_mimetype_mapping = {
 
            # map original mimetype to a mimetype used for "show as raw"
 
            # you can also provide a content-disposition to override the
 
            # default "attachment" disposition.
 
            # orig_type: (new_type, new_dispo)
 

	
 
            # show images inline:
 
            'image/x-icon': ('image/x-icon', 'inline'),
 
            'image/png': ('image/png', 'inline'),
 
            'image/gif': ('image/gif', 'inline'),
 
            'image/jpeg': ('image/jpeg', 'inline'),
 
            'image/svg+xml': ('image/svg+xml', 'inline'),
 
        }
 

	
 
        mimetype = file_node.mimetype
 
        try:
 
            mimetype, dispo = raw_mimetype_mapping[mimetype]
 
        except KeyError:
 
            # we don't know anything special about this, handle it safely
 
            if file_node.is_binary:
 
                # do same as download raw for binary files
 
                mimetype, dispo = 'application/octet-stream', 'attachment'
 
            else:
 
                # do not just use the original mimetype, but force text/plain,
 
                # otherwise it would serve text/html and that might be unsafe.
 
                # Note: underlying vcs library fakes text/plain mimetype if the
 
                # mimetype cannot be determined and it thinks it is not

                # binary. This might lead to erroneous text display in some
 
                # cases, but helps in other cases, like with text files
 
                # without extension.
 
                mimetype, dispo = 'text/plain', 'inline'
 

	
 
        if dispo == 'attachment':
 
            dispo = 'attachment; filename=%s' % f_path.split(os.sep)[-1]
 

	
 
        response.content_disposition = dispo
 
        response.content_type = mimetype
 
        return file_node.content
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionLevelDecorator('write')
 
    def delete(self, repo_name, revision, f_path):
 
        repo = c.db_repo
 
        # check if revision is a branch identifier - basically we cannot
 
        # create multiple heads via file editing
 
        _branches = repo.scm_instance.branches
 
        # check if revision is a branch name or branch hash
 
        if revision not in _branches and revision not in _branches.values():
 
            h.flash(_('You can only delete files with revision '
 
                      'being a valid branch'), category='warning')
 
            raise HTTPFound(location=h.url('files_home',
 
                                  repo_name=repo_name, revision='tip',
 
                                  f_path=f_path))
 

	
 
        r_post = request.POST
 

	
 
        c.cs = self.__get_cs(revision)
 
        c.file = self.__get_filenode(c.cs, f_path)
 

	
 
        c.default_message = _('Deleted file %s via Kallithea') % (f_path)
 
        c.f_path = f_path
 
        node_path = f_path
 
        author = request.authuser.full_contact
 

	
 
        if r_post:
 
            message = r_post.get('message') or c.default_message
 

	
 
            try:
 
                nodes = {
 
                    node_path: {
 
                        'content': ''
 
                    }
 
                }
 
                self.scm_model.delete_nodes(
 
                    user=request.authuser.user_id,
 
                    ip_addr=request.ip_addr,
 
                    repo=c.db_repo,
 
                    message=message,
 
                    nodes=nodes,
 
                    parent_cs=c.cs,
 
                    author=author,
 
                )
 

	
 
                h.flash(_('Successfully deleted file %s') % f_path,
 
                        category='success')
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                h.flash(_('Error occurred during commit'), category='error')
 
            raise HTTPFound(location=url('changeset_home',
 
                                repo_name=c.repo_name, revision='tip'))
 

	
 
        return render('files/files_delete.html')
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionLevelDecorator('write')
 
    def edit(self, repo_name, revision, f_path):
 
        repo = c.db_repo
 
        # check if revision is a branch identifier - basically we cannot
 
        # create multiple heads via file editing
 
        _branches = repo.scm_instance.branches
 
        # check if revision is a branch name or branch hash
 
        if revision not in _branches and revision not in _branches.values():
 
            h.flash(_('You can only edit files with revision '
 
                      'being a valid branch'), category='warning')
 
            raise HTTPFound(location=h.url('files_home',
 
                                  repo_name=repo_name, revision='tip',
 
                                  f_path=f_path))
 

	
 
        r_post = request.POST
 

	
 
        c.cs = self.__get_cs(revision)
 
        c.file = self.__get_filenode(c.cs, f_path)
 

	
 
        if c.file.is_binary:
 
            raise HTTPFound(location=url('files_home', repo_name=c.repo_name,
 
                            revision=c.cs.raw_id, f_path=f_path))
 
        c.default_message = _('Edited file %s via Kallithea') % (f_path)
 
        c.f_path = f_path
 

	
 
        if r_post:
 
            old_content = safe_str(c.file.content)
 
            sl = old_content.splitlines(1)
 
            first_line = sl[0] if sl else ''
 
            # modes:  0 - Unix, 1 - Mac, 2 - DOS
 
            mode = detect_mode(first_line, 0)
 
            content = convert_line_endings(r_post.get('content', ''), mode)
 

	
 
            message = r_post.get('message') or c.default_message
 
            author = request.authuser.full_contact
 

	
 
            if content == old_content:
 
                h.flash(_('No changes'), category='warning')
 
                raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
 
                                    revision='tip'))
 
            try:
 
                self.scm_model.commit_change(repo=c.db_repo_scm_instance,
 
                                             repo_name=repo_name, cs=c.cs,
 
                                             user=request.authuser.user_id,
 
                                             ip_addr=request.ip_addr,
 
                                             author=author, message=message,
 
                                             content=content, f_path=f_path)
 
                h.flash(_('Successfully committed to %s') % f_path,
 
                        category='success')
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                h.flash(_('Error occurred during commit'), category='error')
 
            raise HTTPFound(location=url('changeset_home',
 
                                repo_name=c.repo_name, revision='tip'))
 

	
 
        return render('files/files_edit.html')
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionLevelDecorator('write')
 
    def add(self, repo_name, revision, f_path):
 

	
 
        repo = c.db_repo
 
        r_post = request.POST
 
        c.cs = self.__get_cs(revision, silent_empty=True)
 
        if c.cs is None:
 
            c.cs = EmptyChangeset(alias=c.db_repo_scm_instance.alias)
 
        c.default_message = (_('Added file via Kallithea'))
 
        c.f_path = f_path
 

	
 
        if r_post:
 
            unix_mode = 0
 
            content = convert_line_endings(r_post.get('content', ''), unix_mode)
 

	
 
            message = r_post.get('message') or c.default_message
 
            filename = r_post.get('filename')
 
            location = r_post.get('location', '')
 
            file_obj = r_post.get('upload_file', None)
 

	
 
            if file_obj is not None and hasattr(file_obj, 'filename'):
 
                filename = file_obj.filename
 
                content = file_obj.file
 

	
 
                if hasattr(content, 'file'):
 
                    # non posix systems store real file under file attr
 
                    content = content.file
 

	
 
            if not content:
 
                h.flash(_('No content'), category='warning')
 
                raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
 
                                    revision='tip'))
 
            if not filename:
 
                h.flash(_('No filename'), category='warning')
 
                raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
 
                                    revision='tip'))
 
            # strip all crap out of file, just leave the basename
 
            filename = os.path.basename(filename)
 
            node_path = posixpath.join(location, filename)
kallithea/lib/auth.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.auth
 
~~~~~~~~~~~~~~~~~~
 

	
 
authentication and permission libraries
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 4, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 
import hashlib
 
import itertools
 
import logging
 
import os
 
import string
 

	
 
import bcrypt
 
import ipaddr
 
from decorator import decorator
 
from sqlalchemy.orm import joinedload
 
from sqlalchemy.orm.exc import ObjectDeletedError
 
from tg import request
 
from tg.i18n import ugettext as _
 
from webob.exc import HTTPForbidden, HTTPFound
 

	
 
from kallithea.config.routing import url
 
from kallithea.lib.caching_query import FromCache
 
from kallithea.lib.utils import conditional_cache, get_repo_group_slug, get_repo_slug, get_user_group_slug
 
from kallithea.lib.utils2 import ascii_bytes, ascii_str, safe_bytes
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.model.db import (
 
    Permission, RepoGroup, Repository, User, UserApiKeys, UserGroup, UserGroupMember, UserGroupRepoGroupToPerm, UserGroupRepoToPerm, UserGroupToPerm, UserGroupUserGroupToPerm, UserIpMap, UserToPerm)
 
from kallithea.model.db import (Permission, RepoGroup, Repository, User, UserApiKeys, UserGroup, UserGroupMember, UserGroupRepoGroupToPerm, UserGroupRepoToPerm,
 
                                UserGroupToPerm, UserGroupUserGroupToPerm, UserIpMap, UserToPerm)
 
from kallithea.model.meta import Session
 
from kallithea.model.user import UserModel
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class PasswordGenerator(object):
 
    """
 
    This is a simple class for generating passwords from different sets of
 
    characters
 
    usage::
 

	
 
        passwd_gen = PasswordGenerator()
 
        # print an 8-letter password containing only big and small letters
 
        passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
 
    """
 
    ALPHABETS_NUM = r'''1234567890'''
 
    ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
 
    ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
 
    ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
 
    ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
 
        + ALPHABETS_NUM + ALPHABETS_SPECIAL
 
    ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
 
    ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
 
    ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
 
    ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM
 

	
 
    def gen_password(self, length, alphabet=ALPHABETS_FULL):
 
        assert len(alphabet) <= 256, alphabet
 
        l = []
 
        while len(l) < length:
 
            i = ord(os.urandom(1))
 
            if i < len(alphabet):
 
                l.append(alphabet[i])
 
        return ''.join(l)
 

	
 

	
 
def get_crypt_password(password):
 
    """
 
    Cryptographic function used for bcrypt password hashing.
 

	
 
    :param password: password to hash
 
    """
 
    return ascii_str(bcrypt.hashpw(safe_bytes(password), bcrypt.gensalt(10)))
 

	
 

	
 
def check_password(password, hashed):
 
    """
 
    Checks whether the password matches the hashed value using bcrypt.

    Remains backwards compatible and accepts plain sha256 hashes which used to
 
    be used on Windows.
 

	
 
    :param password: password
 
    :param hashed: password in hashed form
 
    """
 
    # sha256 hashes will always be 64 hex chars
 
    # bcrypt hashes will always contain $ (and be shorter)
 
    if len(hashed) == 64 and all(x in string.hexdigits for x in hashed):
 
        return hashlib.sha256(password).hexdigest() == hashed
 
    try:
 
        return bcrypt.checkpw(safe_bytes(password), ascii_bytes(hashed))
 
    except ValueError as e:
 
        # bcrypt will throw ValueError 'Invalid hashed_password salt' on all password errors
 
        log.error('error from bcrypt checking password: %s', e)
 
        return False
 
    log.error('check_password failed - no method found for hash length %s', len(hashed))
 
    return False
 

	
 

	
 
def _cached_perms_data(user_id, user_is_admin):
 
    RK = 'repositories'
 
    GK = 'repositories_groups'
 
    UK = 'user_groups'
 
    GLOBAL = 'global'
 
    PERM_WEIGHTS = Permission.PERM_WEIGHTS
 
    permissions = {RK: {}, GK: {}, UK: {}, GLOBAL: set()}
 

	
 
    def bump_permission(kind, key, new_perm):
 
        """Add a new permission for kind and key.
 
        Assuming the permissions are comparable, set the new permission if it
 
        has higher weight, else drop it and keep the old permission.
 
        """
 
        cur_perm = permissions[kind][key]
 
        new_perm_val = PERM_WEIGHTS[new_perm]
 
        cur_perm_val = PERM_WEIGHTS[cur_perm]
 
        if new_perm_val > cur_perm_val:
 
            permissions[kind][key] = new_perm
 

	
 
    #======================================================================
 
    # fetch default permissions
 
    #======================================================================
 
    default_user = User.get_by_username('default', cache=True)
 
    default_user_id = default_user.user_id
 

	
 
    default_repo_perms = Permission.get_default_perms(default_user_id)
 
    default_repo_groups_perms = Permission.get_default_group_perms(default_user_id)
 
    default_user_group_perms = Permission.get_default_user_group_perms(default_user_id)
 

	
 
    if user_is_admin:
 
        #==================================================================
 
        # admin users have all rights;
 
        # based on default permissions, just set everything to admin
 
        #==================================================================
 
        permissions[GLOBAL].add('hg.admin')
 
        permissions[GLOBAL].add('hg.create.write_on_repogroup.true')
 

	
 
        # repositories
 
        for perm in default_repo_perms:
 
            r_k = perm.UserRepoToPerm.repository.repo_name
 
            p = 'repository.admin'
 
            permissions[RK][r_k] = p
 

	
 
        # repository groups
 
        for perm in default_repo_groups_perms:
 
            rg_k = perm.UserRepoGroupToPerm.group.group_name
 
            p = 'group.admin'
 
            permissions[GK][rg_k] = p
 

	
 
        # user groups
 
        for perm in default_user_group_perms:
 
            u_k = perm.UserUserGroupToPerm.user_group.users_group_name
 
            p = 'usergroup.admin'
 
            permissions[UK][u_k] = p
 
        return permissions
 

	
 
    #==================================================================
 
    # SET DEFAULTS GLOBAL, REPOS, REPOSITORY GROUPS
 
    #==================================================================
 

	
 
    # default global permissions taken from the default user
 
    default_global_perms = UserToPerm.query() \
 
        .filter(UserToPerm.user_id == default_user_id) \
 
        .options(joinedload(UserToPerm.permission))
 

	
 
    for perm in default_global_perms:
 
        permissions[GLOBAL].add(perm.permission.permission_name)
 

	
 
    # defaults for repositories, taken from default user
 
    for perm in default_repo_perms:
 
        r_k = perm.UserRepoToPerm.repository.repo_name
 
        if perm.Repository.owner_id == user_id:
 
            p = 'repository.admin'
 
        elif perm.Repository.private:
 
            p = 'repository.none'
 
        else:
 
            p = perm.Permission.permission_name
 
        permissions[RK][r_k] = p
 

	
 
    # defaults for repository groups taken from default user permission
 
    # on given group
 
    for perm in default_repo_groups_perms:
 
        rg_k = perm.UserRepoGroupToPerm.group.group_name
 
        p = perm.Permission.permission_name
 
        permissions[GK][rg_k] = p
 

	
 
    # defaults for user groups taken from default user permission
 
    # on given user group
 
    for perm in default_user_group_perms:
 
        u_k = perm.UserUserGroupToPerm.user_group.users_group_name
 
        p = perm.Permission.permission_name
 
        permissions[UK][u_k] = p
 

	
 
    #======================================================================
 
    # !! Augment GLOBALS with user permissions if any found !!
 
    #======================================================================
 

	
 
    # USER GROUPS comes first
 
    # user group global permissions
 
    user_perms_from_users_groups = Session().query(UserGroupToPerm) \
 
        .options(joinedload(UserGroupToPerm.permission)) \
 
        .join((UserGroupMember, UserGroupToPerm.users_group_id ==
 
               UserGroupMember.users_group_id)) \
 
        .filter(UserGroupMember.user_id == user_id) \
 
        .join((UserGroup, UserGroupMember.users_group_id ==
 
               UserGroup.users_group_id)) \
 
        .filter(UserGroup.users_group_active == True) \
 
        .order_by(UserGroupToPerm.users_group_id) \
 
        .all()
 
    # need to group here by groups since user can be in more than
 
    # one group
 
    _grouped = [[x, list(y)] for x, y in
 
                itertools.groupby(user_perms_from_users_groups,
 
                                  lambda x:x.users_group)]
 
    for gr, perms in _grouped:
 
        for perm in perms:
 
            permissions[GLOBAL].add(perm.permission.permission_name)
 

	
 
    # user specific global permissions
 
    user_perms = Session().query(UserToPerm) \
 
            .options(joinedload(UserToPerm.permission)) \
 
            .filter(UserToPerm.user_id == user_id).all()
 

	
 
    for perm in user_perms:
 
        permissions[GLOBAL].add(perm.permission.permission_name)
 

	
 
    # for each kind of global permission, only keep the one with the highest weight
 
    kind_max_perm = {}
 
    for perm in sorted(permissions[GLOBAL], key=lambda n: PERM_WEIGHTS[n]):
 
        kind = perm.rsplit('.', 1)[0]
 
        kind_max_perm[kind] = perm
 
    permissions[GLOBAL] = set(kind_max_perm.values())
 
    ## END GLOBAL PERMISSIONS
 

	
 
    #======================================================================
 
    # !! PERMISSIONS FOR REPOSITORIES !!
 
    #======================================================================
 
    #======================================================================
 
    # check if user is part of user groups for this repository and
 
    # fill in his permission from it.
 
    #======================================================================
 

	
 
    # user group for repositories permissions
 
    user_repo_perms_from_users_groups = \
 
     Session().query(UserGroupRepoToPerm, Permission, Repository,) \
 
        .join((Repository, UserGroupRepoToPerm.repository_id ==
 
               Repository.repo_id)) \
 
        .join((Permission, UserGroupRepoToPerm.permission_id ==
 
               Permission.permission_id)) \
 
        .join((UserGroup, UserGroupRepoToPerm.users_group_id ==
 
               UserGroup.users_group_id)) \
 
        .filter(UserGroup.users_group_active == True) \
 
        .join((UserGroupMember, UserGroupRepoToPerm.users_group_id ==
 
               UserGroupMember.users_group_id)) \
 
        .filter(UserGroupMember.user_id == user_id) \
 
        .all()
 

	
 
    for perm in user_repo_perms_from_users_groups:
 
        bump_permission(RK,
 
            perm.UserGroupRepoToPerm.repository.repo_name,
 
            perm.Permission.permission_name)
 

	
 
    # user permissions for repositories
 
    user_repo_perms = Permission.get_default_perms(user_id)
 
    for perm in user_repo_perms:
 
        bump_permission(RK,
 
            perm.UserRepoToPerm.repository.repo_name,
 
            perm.Permission.permission_name)
 

	
 
    #======================================================================
 
    # !! PERMISSIONS FOR REPOSITORY GROUPS !!
 
    #======================================================================
 
    #======================================================================
 
    # check if user is part of user groups for this repository groups and
 
    # fill in his permission from it.
 
    #======================================================================
 
    # user group for repo groups permissions
 
    user_repo_group_perms_from_users_groups = \
 
     Session().query(UserGroupRepoGroupToPerm, Permission, RepoGroup) \
 
     .join((RepoGroup, UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)) \
 
     .join((Permission, UserGroupRepoGroupToPerm.permission_id
 
            == Permission.permission_id)) \
 
     .join((UserGroup, UserGroupRepoGroupToPerm.users_group_id ==
 
            UserGroup.users_group_id)) \
 
     .filter(UserGroup.users_group_active == True) \
 
     .join((UserGroupMember, UserGroupRepoGroupToPerm.users_group_id
 
            == UserGroupMember.users_group_id)) \
 
     .filter(UserGroupMember.user_id == user_id) \
 
     .all()
 

	
 
    for perm in user_repo_group_perms_from_users_groups:
 
        bump_permission(GK,
 
            perm.UserGroupRepoGroupToPerm.group.group_name,
 
            perm.Permission.permission_name)
 

	
 
    # user explicit permissions for repository groups
 
    user_repo_groups_perms = Permission.get_default_group_perms(user_id)
 
    for perm in user_repo_groups_perms:
 
        bump_permission(GK,
 
            perm.UserRepoGroupToPerm.group.group_name,
 
            perm.Permission.permission_name)
 

	
 
    #======================================================================
 
    # !! PERMISSIONS FOR USER GROUPS !!
 
    #======================================================================
 
    # user group for user group permissions
 
    user_group_user_groups_perms = \
 
     Session().query(UserGroupUserGroupToPerm, Permission, UserGroup) \
 
     .join((UserGroup, UserGroupUserGroupToPerm.target_user_group_id
 
            == UserGroup.users_group_id)) \
 
     .join((Permission, UserGroupUserGroupToPerm.permission_id
 
            == Permission.permission_id)) \
 
     .join((UserGroupMember, UserGroupUserGroupToPerm.user_group_id
 
            == UserGroupMember.users_group_id)) \
 
     .filter(UserGroupMember.user_id == user_id) \
 
     .join((UserGroup, UserGroupMember.users_group_id ==
 
            UserGroup.users_group_id), aliased=True, from_joinpoint=True) \
 
     .filter(UserGroup.users_group_active == True) \
 
     .all()
 

	
 
    for perm in user_group_user_groups_perms:
 
        bump_permission(UK,
 
            perm.UserGroupUserGroupToPerm.target_user_group.users_group_name,
 
            perm.Permission.permission_name)
 

	
 
    # user explicit permission for user groups
 
    user_user_groups_perms = Permission.get_default_user_group_perms(user_id)
 
    for perm in user_user_groups_perms:
 
        bump_permission(UK,
 
            perm.UserUserGroupToPerm.user_group.users_group_name,
 
            perm.Permission.permission_name)
 

	
 
    return permissions
 

	
 

	
 
class AuthUser(object):
 
    """
 
    Represents a Kallithea user, including various authentication and
 
    authorization information. Typically used to store the current user,
 
    but is also used as a generic user information data structure in
 
    parts of the code, e.g. user management.
 

	
 
    Constructed from a database `User` object, a user ID or cookie dict,
 
    it looks up the user (if needed) and copies all attributes to itself,
 
    adding various non-persistent data. If lookup fails but anonymous
 
    access to Kallithea is enabled, the default user is loaded instead.
 

	
 
    `AuthUser` does not by itself authenticate users. It's up to other parts of
 
    the code to check e.g. if a supplied password is correct, and if so, trust
 
    the AuthUser object as an authenticated user.
 

	
 
    However, `AuthUser` does refuse to load a user that is not `active`.
 

	
 
    Note that Kallithea distinguishes between the default user (an actual
 
    user in the database with username "default") and "no user" (no actual
 
    User object, AuthUser filled with blank values and username "None").
 

	
 
    If the default user is active, that will always be used instead of
 
    "no user". On the other hand, if the default user is disabled (and
 
    there is no login information), we instead get "no user"; this should
 
    only happen on the login page (as all other requests are redirected).
 

	
 
    `is_default_user` specifically checks if the AuthUser is the user named
 
    "default". Use `is_anonymous` to check for both "default" and "no user".
 
    """
 

	
 
    @classmethod
 
    def make(cls, dbuser=None, is_external_auth=False, ip_addr=None):
 
        """Create an AuthUser to be authenticated ... or return None if user for some reason can't be authenticated.
 
        Checks that a non-None dbuser is provided, is active, and that the IP address is ok.
 
        """
 
        assert ip_addr is not None
 
        if dbuser is None:
 
            log.info('No db user for authentication')
 
            return None
 
        if not dbuser.active:
 
            log.info('Db user %s not active', dbuser.username)
 
            return None
 
        allowed_ips = AuthUser.get_allowed_ips(dbuser.user_id, cache=True)
 
        if not check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
 
            log.info('Access for %s from %s forbidden - not in %s', dbuser.username, ip_addr, allowed_ips)
 
            return None
 
        return cls(dbuser=dbuser, is_external_auth=is_external_auth)
 

	
 
    def __init__(self, user_id=None, dbuser=None, is_external_auth=False):
 
        self.is_external_auth = is_external_auth # container auth - don't show logout option
 

	
 
        # These attributes will be overridden below if the requested user is
 
        # found or anonymous access (using the default user) is enabled.
 
        self.user_id = None
 
        self.username = None
 
        self.api_key = None
 
        self.name = ''
 
        self.lastname = ''
 
        self.email = ''
 
        self.admin = False
 

	
 
        # Look up database user, if necessary.
 
        if user_id is not None:
 
            assert dbuser is None
 
            log.debug('Auth User lookup by USER ID %s', user_id)
 
            dbuser = UserModel().get(user_id)
 
            assert dbuser is not None
 
        else:
 
            assert dbuser is not None
 
            log.debug('Auth User lookup by database user %s', dbuser)
 

	
 
        log.debug('filling %s data', dbuser)
 
        self.is_anonymous = dbuser.is_default_user
 
        if dbuser.is_default_user and not dbuser.active:
 
            self.username = 'None'
 
            self.is_default_user = False
 
        else:
kallithea/lib/vcs/backends/base.py
 
# -*- coding: utf-8 -*-
 
"""
 
    vcs.backends.base
 
    ~~~~~~~~~~~~~~~~~
 

	
 
    Base for all available scm backends
 

	
 
    :created_on: Apr 8, 2010
 
    :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
 
"""
 

	
 
import datetime
 
import itertools
 

	
 
from kallithea.lib.vcs.conf import settings
 
from kallithea.lib.vcs.exceptions import (
 
    ChangesetError, EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError, NodeDoesNotExistError, NodeNotChangedError, RepositoryError)
 
from kallithea.lib.vcs.exceptions import (ChangesetError, EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyChangedError, NodeAlreadyExistsError,
 
                                          NodeAlreadyRemovedError, NodeDoesNotExistError, NodeNotChangedError, RepositoryError)
 
from kallithea.lib.vcs.utils import author_email, author_name
 
from kallithea.lib.vcs.utils.helpers import get_dict_for_attrs
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 

	
 

	
 
class BaseRepository(object):
 
    """
 
    Base Repository for final backends
 

	
 
    **Attributes**
 

	
 
        ``DEFAULT_BRANCH_NAME``
 
            name of default branch (i.e. "trunk" for svn, "master" for git, etc.)
 

	
 
        ``scm``
 
            alias of scm, i.e. *git* or *hg*
 

	
 
        ``repo``
 
            object from external api
 

	
 
        ``revisions``
 
            list of all available revisions' ids, in ascending order
 

	
 
        ``changesets``
 
            storage dict caching returned changesets
 

	
 
        ``path``
 
            absolute path to the repository
 

	
 
        ``branches``
 
            branches as list of changesets
 

	
 
        ``tags``
 
            tags as list of changesets
 
    """
 
    scm = None
 
    DEFAULT_BRANCH_NAME = None
 
    EMPTY_CHANGESET = '0' * 40
 

	
 
    def __init__(self, repo_path, create=False, **kwargs):
 
        """
 
        Initializes repository. Raises RepositoryError if repository could
 
        not be found at the given ``repo_path``, or if a directory at ``repo_path``
 
        already exists and ``create`` is set to True.
 

	
 
        :param repo_path: local path of the repository
 
        :param create=False: if set to True, will try to create the repository.
 
        :param src_url=None: if set, should be a proper url from which the repository
 
          would be cloned; requires ``create`` parameter to be set to True -
 
          raises RepositoryError if src_url is set and create evaluates to
 
          False
 
        """
 
        raise NotImplementedError
 

	
 
    def __str__(self):
 
        return '<%s at %s>' % (self.__class__.__name__, self.path)
 

	
 
    def __repr__(self):
 
        return self.__str__()
 

	
 
    def __len__(self):
 
        return self.count()
 

	
 
    def __eq__(self, other):
 
        same_instance = isinstance(other, self.__class__)
 
        return same_instance and getattr(other, 'path', None) == self.path
 

	
 
    def __ne__(self, other):
 
        return not self.__eq__(other)
 

	
 
    @LazyProperty
 
    def alias(self):
 
        for k, v in settings.BACKENDS.items():
 
            if v.split('.')[-1] == str(self.__class__.__name__):
 
                return k
 

	
 
    @LazyProperty
 
    def name(self):
 
        """
 
        Return repository name (without group name)
 
        """
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def owner(self):
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def description(self):
 
        raise NotImplementedError
 

	
 
    @LazyProperty
 
    def size(self):
 
        """
 
        Returns combined size in bytes for all repository files
 
        """
 

	
 
        size = 0
 
        try:
 
            tip = self.get_changeset()
 
            for topnode, dirs, files in tip.walk('/'):
 
                for f in files:
 
                    size += tip.get_file_size(f.path)
 

	
 
        except RepositoryError as e:
 
            pass
 
        return size
 

	
 
    def is_valid(self):
 
        """
 
        Validates repository.
 
        """
 
        raise NotImplementedError
 

	
 
    def is_empty(self):
 
        return self._empty
 

	
 
    #==========================================================================
 
    # CHANGESETS
 
    #==========================================================================
 

	
 
    def get_changeset(self, revision=None):
 
        """
 
        Returns instance of ``Changeset`` class. If ``revision`` is None, most
 
        recent changeset is returned.
 

	
 
        :raises ``EmptyRepositoryError``: if there are no revisions
 
        """
 
        raise NotImplementedError
 

	
 
    def __iter__(self):
 
        """
 
        Allows Repository objects to be iterated.
 

	
 
        *Requires* implementation of ``__getitem__`` method.
 
        """
 
        for revision in self.revisions:
 
            yield self.get_changeset(revision)
 

	
 
    def get_changesets(self, start=None, end=None, start_date=None,
 
                       end_date=None, branch_name=None, reverse=False, max_revisions=None):
 
        """
 
        Returns iterator of ``BaseChangeset`` objects from start to end,
 
        both inclusive.
 

	
 
        :param start: None or str
 
        :param end: None or str
 
        :param start_date:
 
        :param end_date:
 
        :param branch_name:
 
        :param reverse:
 
        """
 
        raise NotImplementedError
 

	
 
    def __getitem__(self, key):
 
        if isinstance(key, slice):
 
            return (self.get_changeset(rev) for rev in self.revisions[key])
 
        return self.get_changeset(key)
 

	
 
    def count(self):
 
        return len(self.revisions)
 

	
 
    def tag(self, name, user, revision=None, message=None, date=None, **opts):
 
        """
 
        Creates and returns a tag for the given ``revision``.
 

	
 
        :param name: name for new tag
 
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
 
        :param revision: changeset id for which new tag would be created
 
        :param message: message of the tag's commit
 
        :param date: date of tag's commit
 

	
 
        :raises TagAlreadyExistError: if tag with same name already exists
 
        """
 
        raise NotImplementedError
 

	
 
    def remove_tag(self, name, user, message=None, date=None):
 
        """
 
        Removes tag with the given ``name``.
 

	
 
        :param name: name of the tag to be removed
 
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
 
        :param message: message of the tag's removal commit
 
        :param date: date of tag's removal commit
 

	
 
        :raises TagDoesNotExistError: if tag with given name does not exist
 
        """
 
        raise NotImplementedError
 

	
 
    def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
 
            context=3):
 
        """
 
        Returns (git like) *diff*, as plain text. Shows changes introduced by
 
        ``rev2`` since ``rev1``.
 

	
 
        :param rev1: Entry point from which diff is shown. Can be
 
          ``self.EMPTY_CHANGESET`` - in this case, patch showing all
 
          the changes since empty state of the repository until ``rev2``
 
        :param rev2: Until which revision changes should be shown.
 
        :param ignore_whitespace: If set to ``True``, would not show whitespace
 
          changes. Defaults to ``False``.
 
        :param context: How many lines before/after changed lines should be
 
          shown. Defaults to ``3``.
 
        """
 
        raise NotImplementedError
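    # Editor's note: a minimal usage sketch, not part of this changeset. It
    # assumes `repo` is a concrete backend instance (e.g. GitRepository), since
    # this base method only defines the interface:
    #
    #     diff = repo.get_diff(repo.EMPTY_CHANGESET, 'tip',
    #                          ignore_whitespace=True, context=5)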
 

	
 
    # ========== #
 
    # COMMIT API #
 
    # ========== #
 

	
 
    @LazyProperty
 
    def in_memory_changeset(self):
 
        """
 
        Returns ``InMemoryChangeset`` object for this repository.
 
        """
 
        raise NotImplementedError
 

	
 
    def add(self, filenode, **kwargs):
 
        """
 
        Commit api function that will add the given ``FileNode`` to this
 
        repository.
 

	
 
        :raises ``NodeAlreadyExistsError``: if there is a file with same path
 
          already in repository
 
        :raises ``NodeAlreadyAddedError``: if given node is already marked as
 
          *added*
 
        """
 
        raise NotImplementedError
 

	
 
    def remove(self, filenode, **kwargs):
 
        """
 
        Commit api function that will remove the given ``FileNode`` from this
 
        repository.
 

	
 
        :raises ``EmptyRepositoryError``: if there are no changesets yet
 
        :raises ``NodeDoesNotExistError``: if there is no file with given path
 
        """
 
        raise NotImplementedError
 

	
 
    def commit(self, message, **kwargs):
 
        """
 
        Persists current changes made on this repository and returns newly
 
        created changeset.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_state(self):
 
        """
 
        Returns dictionary with ``added``, ``changed`` and ``removed`` lists
 
        containing ``FileNode`` objects.
 
        """
 
        raise NotImplementedError
 

	
 
    def get_config_value(self, section, name, config_file=None):
 
        """
 
        Returns configuration value for a given [``section``] and ``name``.
 

	
 
        :param section: Section we want to retrieve value from
 
        :param name: Name of configuration we want to retrieve
 
        :param config_file: A path to file which should be used to retrieve
 
          configuration from (might also be a list of file paths)
 
        """
 
        raise NotImplementedError
 

	
 
    def get_user_name(self, config_file=None):
 
        """
 
        Returns user's name from global configuration file.
 

	
 
        :param config_file: A path to file which should be used to retrieve
 
          configuration from (might also be a list of file paths)
 
        """
 
        raise NotImplementedError
 

	
 
    def get_user_email(self, config_file=None):
 
        """
 
        Returns user's email from global configuration file.
 

	
 
        :param config_file: A path to file which should be used to retrieve
 
          configuration from (might also be a list of file paths)
 
        """
 
        raise NotImplementedError
 

	
 
    # =========== #
 
    # WORKDIR API #
 
    # =========== #
 

	
 
    @LazyProperty
 
    def workdir(self):
 
        """
 
        Returns ``Workdir`` instance for this repository.
 
        """
 
        raise NotImplementedError
 

	
 

	
 
class BaseChangeset(object):
 
    """
 
    Each backend should implement its changeset representation.
 

	
 
    **Attributes**
 

	
 
        ``repository``
 
            repository object within which changeset exists
 

	
 
        ``raw_id``
 
            raw changeset representation (i.e. full 40 length sha for git
 
            backend)
 

	
 
        ``short_id``
 
            shortened (if applicable) version of ``raw_id``; it is a simple
 
            shortcut for ``raw_id[:12]`` for git/mercurial backends or same
 
            as ``raw_id`` for subversion
 

	
 
        ``revision``
 
            revision number as integer
 

	
 
        ``files``
 
            list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
 

	
 
        ``dirs``
 
            list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
 

	
 
        ``nodes``
 
            combined list of ``Node`` objects
 

	
 
        ``author``
 
            author of the changeset, as str
 

	
 
        ``message``
 
            message of the changeset, as str
 

	
 
        ``parents``
 
            list of parent changesets
 

	
 
        ``last``
 
            ``True`` if this is last changeset in repository, ``False``
 
            otherwise; trying to access this attribute while there are no
 
            changesets would raise ``EmptyRepositoryError``
 
    """
 
    def __str__(self):
 
        return '<%s at %s:%s>' % (self.__class__.__name__, self.revision,
 
            self.short_id)
 

	
 
    def __repr__(self):
 
        return self.__str__()
 

	
 
    def __eq__(self, other):
 
        if type(self) is not type(other):
 
            return False
 
        return self.raw_id == other.raw_id
 

	
 
    def __json__(self, with_file_list=False):
 
        if with_file_list:
 
            return dict(
 
                short_id=self.short_id,
 
                raw_id=self.raw_id,
 
                revision=self.revision,
 
                message=self.message,
 
                date=self.date,
 
                author=self.author,
 
                added=[el.path for el in self.added],
 
                changed=[el.path for el in self.changed],
 
                removed=[el.path for el in self.removed],
 
            )
 
        else:
 
            return dict(
 
                short_id=self.short_id,
 
                raw_id=self.raw_id,
 
                revision=self.revision,
 
                message=self.message,
 
                date=self.date,
 
                author=self.author,
 
            )
 

	
 
    @LazyProperty
 
    def last(self):
 
        if self.repository is None:
 
            raise ChangesetError("Cannot check if it's most recent revision")
 
        return self.raw_id == self.repository.revisions[-1]
 

	
 
    @LazyProperty
 
    def parents(self):
 
        """
 
        Returns list of parent changesets.
 
        """
kallithea/lib/vcs/backends/git/changeset.py
Show inline comments
 
import re
 
from io import BytesIO
 
from itertools import chain
 
from subprocess import PIPE, Popen
 

	
 
from dulwich import objects
 
from dulwich.config import ConfigFile
 

	
 
from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
 
from kallithea.lib.vcs.conf import settings
 
from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, ImproperArchiveTypeError, NodeDoesNotExistError, RepositoryError, VCSError
 
from kallithea.lib.vcs.nodes import (
 
    AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode, NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode)
 
from kallithea.lib.vcs.nodes import (AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode, NodeKind, RemovedFileNodesGenerator, RootNode,
 
                                     SubModuleNode)
 
from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, date_fromtimestamp, safe_int, safe_str
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 

	
 

	
 
class GitChangeset(BaseChangeset):
 
    """
 
    Represents state of the repository at a revision.
 
    """
 

	
 
    def __init__(self, repository, revision):
 
        self._stat_modes = {}
 
        self.repository = repository
 
        try:
 
            commit = self.repository._repo[ascii_bytes(revision)]
 
            if isinstance(commit, objects.Tag):
 
                revision = safe_str(commit.object[1])
 
                commit = self.repository._repo.get_object(commit.object[1])
 
        except KeyError:
 
            raise RepositoryError("Cannot get object with id %s" % revision)
 
        self.raw_id = ascii_str(commit.id)
 
        self.short_id = self.raw_id[:12]
 
        self._commit = commit  # a Dulwich Commit with .id
 
        self._tree_id = commit.tree
 
        self._committer_property = 'committer'
 
        self._author_property = 'author'
 
        self._date_property = 'commit_time'
 
        self._date_tz_property = 'commit_timezone'
 
        self.revision = repository.revisions.index(self.raw_id)
 

	
 
        self.nodes = {}
 
        self._paths = {}
 

	
 
    @LazyProperty
 
    def bookmarks(self):
 
        return ()
 

	
 
    @LazyProperty
 
    def message(self):
 
        return safe_str(self._commit.message)
 

	
 
    @LazyProperty
 
    def committer(self):
 
        return safe_str(getattr(self._commit, self._committer_property))
 

	
 
    @LazyProperty
 
    def author(self):
 
        return safe_str(getattr(self._commit, self._author_property))
 

	
 
    @LazyProperty
 
    def date(self):
 
        return date_fromtimestamp(getattr(self._commit, self._date_property),
 
                                  getattr(self._commit, self._date_tz_property))
 

	
 
    @LazyProperty
 
    def _timestamp(self):
 
        return getattr(self._commit, self._date_property)
 

	
 
    @LazyProperty
 
    def status(self):
 
        """
 
        Returns modified, added, removed, deleted files for current changeset
 
        """
 
        return self.changed, self.added, self.removed
 

	
 
    @LazyProperty
 
    def tags(self):
 
        _tags = []
 
        for tname, tsha in self.repository.tags.items():
 
            if tsha == self.raw_id:
 
                _tags.append(tname)
 
        return _tags
 

	
 
    @LazyProperty
 
    def branch(self):
 
        # Note: This function will return one branch name for the changeset -
 
        # that might not make sense in Git where branches() is a better match
 
        # for the basic model
 
        heads = self.repository._heads(reverse=False)
 
        ref = heads.get(self._commit.id)
 
        if ref:
 
            return safe_str(ref)
 

	
 
    @LazyProperty
 
    def branches(self):
 
        heads = self.repository._heads(reverse=True)
 
        return [safe_str(b) for b in heads if heads[b] == self._commit.id] # FIXME: Inefficient ... and returning None!
 

	
 
    def _get_id_for_path(self, path):
 
        # FIXME: Please, spare a couple of minutes and make those codes cleaner;
 
        if path not in self._paths:
 
            path = path.strip('/')
 
            # set root tree
 
            tree = self.repository._repo[self._tree_id]
 
            if path == '':
 
                self._paths[''] = tree.id
 
                return tree.id
 
            splitted = path.split('/')
 
            dirs, name = splitted[:-1], splitted[-1]
 
            curdir = ''
 

	
 
            # initially extract things from root dir
 
            for item, stat, id in tree.items():
 
                name = safe_str(item)
 
                if curdir:
 
                    name = '/'.join((curdir, name))
 
                self._paths[name] = id
 
                self._stat_modes[name] = stat
 

	
 
            for dir in dirs:
 
                if curdir:
 
                    curdir = '/'.join((curdir, dir))
 
                else:
 
                    curdir = dir
 
                dir_id = None
 
                for item, stat, id in tree.items():
 
                    name = safe_str(item)
 
                    if dir == name:
 
                        dir_id = id
 
                if dir_id:
 
                    # Update tree
 
                    tree = self.repository._repo[dir_id]
 
                    if not isinstance(tree, objects.Tree):
 
                        raise ChangesetError('%s is not a directory' % curdir)
 
                else:
 
                    raise ChangesetError('%s has not been found' % curdir)
 

	
 
                # cache all items from the given traversed tree
 
                for item, stat, id in tree.items():
 
                    name = safe_str(item)
 
                    if curdir:
 
                        name = '/'.join((curdir, name))
 
                    self._paths[name] = id
 
                    self._stat_modes[name] = stat
 
            if path not in self._paths:
 
                raise NodeDoesNotExistError("There is no file nor directory "
 
                    "at the given path '%s' at revision %s"
 
                    % (path, self.short_id))
 
        return self._paths[path]
 

	
 
    def _get_kind(self, path):
 
        obj = self.repository._repo[self._get_id_for_path(path)]
 
        if isinstance(obj, objects.Blob):
 
            return NodeKind.FILE
 
        elif isinstance(obj, objects.Tree):
 
            return NodeKind.DIR
 

	
 
    def _get_filectx(self, path):
 
        path = path.rstrip('/')
 
        if self._get_kind(path) != NodeKind.FILE:
 
            raise ChangesetError("File does not exist for revision %s at "
 
                " '%s'" % (self.raw_id, path))
 
        return path
 

	
 
    def _get_file_nodes(self):
 
        return chain(*(t[2] for t in self.walk()))
 

	
 
    @LazyProperty
 
    def parents(self):
 
        """
 
        Returns list of parent changesets.
 
        """
 
        return [self.repository.get_changeset(ascii_str(parent_id))
 
                for parent_id in self._commit.parents]
 

	
 
    @LazyProperty
 
    def children(self):
 
        """
 
        Returns list of children changesets.
 
        """
 
        rev_filter = settings.GIT_REV_FILTER
 
        so = self.repository.run_git_command(
 
            ['rev-list', rev_filter, '--children']
 
        )
 
        return [
 
            self.repository.get_changeset(cs)
 
            for parts in (l.split(' ') for l in so.splitlines())
 
            if parts[0] == self.raw_id
 
            for cs in parts[1:]
 
        ]
 

	
 
    def next(self, branch=None):
 
        if branch and self.branch != branch:
 
            raise VCSError('Branch option used on changeset not belonging '
 
                           'to that branch')
 

	
 
        cs = self
 
        while True:
 
            try:
 
                next_ = cs.revision + 1
 
                next_rev = cs.repository.revisions[next_]
 
            except IndexError:
 
                raise ChangesetDoesNotExistError
 
            cs = cs.repository.get_changeset(next_rev)
 

	
 
            if not branch or branch == cs.branch:
 
                return cs
 

	
 
    def prev(self, branch=None):
 
        if branch and self.branch != branch:
 
            raise VCSError('Branch option used on changeset not belonging '
 
                           'to that branch')
 

	
 
        cs = self
 
        while True:
 
            try:
 
                prev_ = cs.revision - 1
 
                if prev_ < 0:
 
                    raise IndexError
 
                prev_rev = cs.repository.revisions[prev_]
 
            except IndexError:
 
                raise ChangesetDoesNotExistError
 
            cs = cs.repository.get_changeset(prev_rev)
 

	
 
            if not branch or branch == cs.branch:
 
                return cs
 

	
 
    def diff(self, ignore_whitespace=True, context=3):
 
        # Only used to feed diffstat
 
        rev1 = self.parents[0] if self.parents else self.repository.EMPTY_CHANGESET
 
        rev2 = self
 
        return b''.join(self.repository.get_diff(rev1, rev2,
 
                                    ignore_whitespace=ignore_whitespace,
 
                                    context=context))
 

	
 
    def get_file_mode(self, path):
 
        """
 
        Returns stat mode of the file at the given ``path``.
 
        """
 
        # ensure path is traversed
 
        self._get_id_for_path(path)
 
        return self._stat_modes[path]
 

	
 
    def get_file_content(self, path):
 
        """
 
        Returns content of the file at given ``path``.
 
        """
 
        id = self._get_id_for_path(path)
 
        blob = self.repository._repo[id]
 
        return blob.as_pretty_string()
 

	
 
    def get_file_size(self, path):
 
        """
 
        Returns size of the file at given ``path``.
 
        """
 
        id = self._get_id_for_path(path)
 
        blob = self.repository._repo[id]
 
        return blob.raw_length()
 

	
 
    def get_file_changeset(self, path):
 
        """
 
        Returns last commit of the file at the given ``path``.
 
        """
 
        return self.get_file_history(path, limit=1)[0]
 

	
 
    def get_file_history(self, path, limit=None):
 
        """
 
        Returns history of file as reversed list of ``Changeset`` objects for
 
        which file at given ``path`` has been modified.
 

	
 
        TODO: This function now uses the underlying OS 'git' and 'grep' commands,
 
        which is generally not good. Should be replaced with an algorithm
 
        iterating commits.
 
        """
 
        self._get_filectx(path)
 

	
 
        if limit is not None:
 
            cmd = ['log', '-n', str(safe_int(limit, 0)),
 
                   '--pretty=format:%H', '-s', self.raw_id, '--', path]
 

	
 
        else:
 
            cmd = ['log',
 
                   '--pretty=format:%H', '-s', self.raw_id, '--', path]
 
        so = self.repository.run_git_command(cmd)
 
        ids = re.findall(r'[0-9a-fA-F]{40}', so)
 
        return [self.repository.get_changeset(sha) for sha in ids]
 

	
 
    def get_file_history_2(self, path):
 
        """
 
        Returns history of file as reversed list of ``Changeset`` objects for
 
        which file at given ``path`` has been modified.
 

	
 
        """
 
        self._get_filectx(path)
 
        from dulwich.walk import Walker
 
        include = [self.raw_id]
 
        walker = Walker(self.repository._repo.object_store, include,
 
                        paths=[path], max_entries=1)
 
        return [self.repository.get_changeset(ascii_str(x.commit.id))
 
                for x in walker]
 

	
 
    def get_file_annotate(self, path):
 
        """
 
        Returns a generator of four element tuples with
 
            lineno, sha, changeset lazy loader and line
 
        """
 
        # TODO: This function now uses the underlying OS 'git' command, which is
 
        # generally not good. Should be replaced with an algorithm iterating
 
        # commits.
 
        cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
 
        # -l     ==> outputs long shas (and we need all 40 characters)
 
        # --root ==> doesn't put '^' character for boundaries
 
        # -r sha ==> blames for the given revision
 
        so = self.repository.run_git_command(cmd)
 

	
 
        for i, blame_line in enumerate(so.split('\n')[:-1]):
 
            sha, line = re.split(r' ', blame_line, 1)
 
            yield (i + 1, sha, lambda sha=sha: self.repository.get_changeset(sha), line)
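    # Editor's note: illustrative consumption of the generator above, not part
    # of this changeset; `cs` (a GitChangeset) and `path` are assumed to exist:
    #
    #     for lineno, sha, get_cs, line in cs.get_file_annotate(path):
    #         print(lineno, sha[:12], line)  # get_cs() lazily loads the changeset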
 

	
 
    def fill_archive(self, stream=None, kind='tgz', prefix=None,
 
                     subrepos=False):
 
        """
 
        Fills up given stream.
 

	
 
        :param stream: file like object.
 
        :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``.
 
            Default: ``tgz``.
 
        :param prefix: name of root directory in archive.
 
            Default is repository name and changeset's raw_id joined with dash
 
            (``repo-tip.<KIND>``).
 
        :param subrepos: include subrepos in this archive.
 

	
 
        :raise ImproperArchiveTypeError: If given kind is wrong.
 
        :raise VcsError: If given stream is None
 
        """
 
        allowed_kinds = settings.ARCHIVE_SPECS
 
        if kind not in allowed_kinds:
 
            raise ImproperArchiveTypeError('Archive kind not supported, use one '
 
                'of %s' % ' '.join(allowed_kinds))
 

	
 
        if stream is None:
 
            raise VCSError('You need to pass in a valid stream for filling'
 
                           ' with archival data')
 

	
 
        if prefix is None:
 
            prefix = '%s-%s' % (self.repository.name, self.short_id)
 
        elif prefix.startswith('/'):
 
            raise VCSError("Prefix cannot start with leading slash")
 
        elif prefix.strip() == '':
 
            raise VCSError("Prefix cannot be empty")
 

	
 
        if kind == 'zip':
 
            frmt = 'zip'
 
        else:
 
            frmt = 'tar'
 
        _git_path = settings.GIT_EXECUTABLE_PATH
 
        cmd = '%s archive --format=%s --prefix=%s/ %s' % (_git_path,
 
                                                frmt, prefix, self.raw_id)
 
        if kind == 'tgz':
 
            cmd += ' | gzip -9'
 
        elif kind == 'tbz2':
 
            cmd += ' | bzip2 -9'
 

	
 
        if stream is None:
 
            raise VCSError('You need to pass in a valid stream for filling'
 
                           ' with archival data')
 
        popen = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True,
 
                      cwd=self.repository.path)
 

	
 
        buffer_size = 1024 * 8
 
        chunk = popen.stdout.read(buffer_size)
 
        while chunk:
 
            stream.write(chunk)
 
            chunk = popen.stdout.read(buffer_size)
 
        # Make sure all descriptors would be read
 
        popen.communicate()
 

	
 
    def get_nodes(self, path):
 
        """
 
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
 
        state of changeset at the given ``path``. If node at the given ``path``
 
        is not instance of ``DirNode``, ChangesetError would be raised.
 
        """
 

	
 
        if self._get_kind(path) != NodeKind.DIR:
 
            raise ChangesetError("Directory does not exist for revision %s at "
 
                " '%s'" % (self.revision, path))
 
        path = path.rstrip('/')
 
        id = self._get_id_for_path(path)
 
        tree = self.repository._repo[id]
 
        dirnodes = []
 
        filenodes = []
 
        als = self.repository.alias
 
        for name, stat, id in tree.items():
 
            obj_path = safe_str(name)
kallithea/lib/vcs/backends/git/repository.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    vcs.backends.git.repository
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    Git repository implementation.
 

	
 
    :created_on: Apr 8, 2010
 
    :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
 
"""
 

	
 
import errno
 
import logging
 
import os
 
import re
 
import time
 
import urllib.error
 
import urllib.parse
 
import urllib.request
 
from collections import OrderedDict
 

	
 
import mercurial.url  # import httpbasicauthhandler, httpdigestauthhandler
 
import mercurial.util  # import url as hg_url
 
from dulwich.config import ConfigFile
 
from dulwich.objects import Tag
 
from dulwich.repo import NotGitRepository, Repo
 

	
 
from kallithea.lib.vcs import subprocessio
 
from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator
 
from kallithea.lib.vcs.conf import settings
 
from kallithea.lib.vcs.exceptions import (
 
    BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, TagDoesNotExistError)
 
from kallithea.lib.vcs.exceptions import (BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
 
                                          TagDoesNotExistError)
 
from kallithea.lib.vcs.utils import ascii_str, date_fromtimestamp, makedate, safe_bytes, safe_str
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.lib.vcs.utils.paths import abspath, get_user_home
 

	
 
from .changeset import GitChangeset
 
from .inmemory import GitInMemoryChangeset
 
from .workdir import GitWorkdir
 

	
 

	
 
SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class GitRepository(BaseRepository):
 
    """
 
    Git repository backend.
 
    """
 
    DEFAULT_BRANCH_NAME = 'master'
 
    scm = 'git'
 

	
 
    def __init__(self, repo_path, create=False, src_url=None,
 
                 update_after_clone=False, bare=False):
 

	
 
        self.path = abspath(repo_path)
 
        self.repo = self._get_repo(create, src_url, update_after_clone, bare)
 
        self.bare = self.repo.bare
 

	
 
    @property
 
    def _config_files(self):
 
        return [
 
            self.bare and abspath(self.path, 'config')
 
                      or abspath(self.path, '.git', 'config'),
 
             abspath(get_user_home(), '.gitconfig'),
 
         ]
 

	
 
    @property
 
    def _repo(self):
 
        return self.repo
 

	
 
    @property
 
    def head(self):
 
        try:
 
            return self._repo.head()
 
        except KeyError:
 
            return None
 

	
 
    @property
 
    def _empty(self):
 
        """
 
        Checks if repository is empty, i.e. without any changesets
 
        """
 

	
 
        try:
 
            self.revisions[0]
 
        except (KeyError, IndexError):
 
            return True
 
        return False
 

	
 
    @LazyProperty
 
    def revisions(self):
 
        """
 
        Returns list of revisions' ids, in ascending order.  Being lazy
 
        attribute allows external tools to inject shas from cache.
 
        """
 
        return self._get_all_revisions()
 

	
 
    @classmethod
 
    def _run_git_command(cls, cmd, cwd=None):
 
        """
 
        Runs given ``cmd`` as git command and returns output bytes in a tuple
 
        (stdout, stderr) ... or raise RepositoryError.
 

	
 
        :param cmd: git command to be executed
 
        :param cwd: passed directly to subprocess
 
        """
 
        # need to clean the environment and remove GIT_DIR
 
        gitenv = dict(os.environ)
 
        gitenv.pop('GIT_DIR', None)
 
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
 

	
 
        assert isinstance(cmd, list), cmd
 
        cmd = [settings.GIT_EXECUTABLE_PATH, '-c', 'core.quotepath=false'] + cmd
 
        try:
 
            p = subprocessio.SubprocessIOChunker(cmd, cwd=cwd, env=gitenv, shell=False)
 
        except (EnvironmentError, OSError) as err:
 
            # output from the failing process is in str(EnvironmentError)
 
            msg = ("Couldn't run git command %s.\n"
 
                   "Subprocess failed with '%s': %s\n" %
 
                   (cmd, type(err).__name__, err)
 
            ).strip()
 
            log.error(msg)
 
            raise RepositoryError(msg)
 

	
 
        try:
 
            stdout = b''.join(p.output)
 
            stderr = b''.join(p.error)
 
        finally:
 
            p.close()
 
        # TODO: introduce option to make commands fail if they have any stderr output?
 
        if stderr:
 
            log.debug('stderr from %s:\n%s', cmd, stderr)
 
        else:
 
            log.debug('stderr from %s: None', cmd)
 
        return stdout, stderr
 

	
 
    def run_git_command(self, cmd):
 
        """
 
        Runs given ``cmd`` as git command with cwd set to current repo.
 
        Returns stdout as unicode str ... or raise RepositoryError.
 
        """
 
        cwd = None
 
        if os.path.isdir(self.path):
 
            cwd = self.path
 
        stdout, _stderr = self._run_git_command(cmd, cwd=cwd)
 
        return safe_str(stdout)
 

	
 
    @classmethod
 
    def _check_url(cls, url):
 
        """
 
        Function will check given url and try to verify if it's a valid
 
        link. Sometimes it may happen that git will issue a basic
 
        auth request that can cause the whole API to hang when used from python
 
        or other external calls.
 

	
 
        On failure it'll raise urllib.error.URLError; an exception is also raised
 
        when the return code is not 200
 
        """
 
        # check first if it's not a local url
 
        if os.path.isdir(url) or url.startswith('file:'):
 
            return True
 

	
 
        if url.startswith('git://'):
 
            return True
 

	
 
        if '+' in url[:url.find('://')]:
 
            url = url[url.find('+') + 1:]
 

	
 
        handlers = []
 
        url_obj = mercurial.util.url(safe_bytes(url))
 
        test_uri, authinfo = url_obj.authinfo()
 
        if not test_uri.endswith(b'info/refs'):
 
            test_uri = test_uri.rstrip(b'/') + b'/info/refs'
 

	
 
        url_obj.passwd = b'*****'
 
        cleaned_uri = str(url_obj)
 

	
 
        if authinfo:
 
            # create a password manager
 
            passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
 
            passmgr.add_password(*authinfo)
 

	
 
            handlers.extend((mercurial.url.httpbasicauthhandler(passmgr),
 
                             mercurial.url.httpdigestauthhandler(passmgr)))
 

	
 
        o = urllib.request.build_opener(*handlers)
 
        o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git
 

	
 
        req = urllib.request.Request(
 
            "%s?%s" % (
 
                test_uri,
 
                urllib.parse.urlencode({"service": 'git-upload-pack'})
 
            ))
 

	
 
        try:
 
            resp = o.open(req)
 
            if resp.code != 200:
 
                raise Exception('Return Code is not 200')
 
        except Exception as e:
 
            # means it cannot be cloned
 
            raise urllib.error.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
 

	
 
        # now detect if it's proper git repo
 
        gitdata = resp.read()
 
        if b'service=git-upload-pack' not in gitdata:
 
            raise urllib.error.URLError(
 
                "url [%s] does not look like an git" % cleaned_uri)
 

	
 
        return True
 

	
 
    def _get_repo(self, create, src_url=None, update_after_clone=False,
 
                  bare=False):
 
        if create and os.path.exists(self.path):
 
            raise RepositoryError("Location already exist")
 
        if src_url and not create:
 
            raise RepositoryError("Create should be set to True if src_url is "
 
                                  "given (clone operation creates repository)")
 
        try:
 
            if create and src_url:
 
                GitRepository._check_url(src_url)
 
                self.clone(src_url, update_after_clone, bare)
 
                return Repo(self.path)
 
            elif create:
 
                os.makedirs(self.path)
 
                if bare:
 
                    return Repo.init_bare(self.path)
 
                else:
 
                    return Repo.init(self.path)
 
            else:
 
                return Repo(self.path)
 
        except (NotGitRepository, OSError) as err:
 
            raise RepositoryError(err)
 

	
 
    def _get_all_revisions(self):
 
        # we must check if this repo is not empty, since later command
 
        # fails if it is. And it's cheaper to ask than throw the subprocess
 
        # errors
 
        try:
 
            self._repo.head()
 
        except KeyError:
 
            return []
 

	
 
        rev_filter = settings.GIT_REV_FILTER
 
        cmd = ['rev-list', rev_filter, '--reverse', '--date-order']
 
        try:
 
            so = self.run_git_command(cmd)
 
        except RepositoryError:
 
            # Can be raised for empty repositories
 
            return []
 
        return so.splitlines()
 

	
 
    def _get_all_revisions2(self):
 
        # alternate implementation using dulwich
 
        includes = [ascii_str(sha) for key, (sha, type_) in self._parsed_refs.items()
 
                    if type_ != b'T']
 
        return [c.commit.id for c in self._repo.get_walker(include=includes)]
 

	
 
    def _get_revision(self, revision):
 
        """
 
        Given any revision identifier, returns a 40 char string with revision hash.
 
        """
 
        if self._empty:
 
            raise EmptyRepositoryError("There are no changesets yet")
 

	
 
        if revision in (None, '', 'tip', 'HEAD', 'head', -1):
 
            revision = -1
 

	
 
        if isinstance(revision, int):
 
            try:
 
                return self.revisions[revision]
 
            except IndexError:
 
                msg = "Revision %r does not exist for %s" % (revision, self.name)
 
                raise ChangesetDoesNotExistError(msg)
 

	
 
        if isinstance(revision, str):
 
            if revision.isdigit() and (len(revision) < 12 or len(revision) == revision.count('0')):
 
                try:
 
                    return self.revisions[int(revision)]
 
                except IndexError:
 
                    msg = "Revision %r does not exist for %s" % (revision, self)
 
                    raise ChangesetDoesNotExistError(msg)
 

	
 
            # get by branch/tag name
 
            _ref_revision = self._parsed_refs.get(safe_bytes(revision))
 
            if _ref_revision:  # and _ref_revision[1] in [b'H', b'RH', b'T']:
 
                return ascii_str(_ref_revision[0])
 

	
 
            if revision in self.revisions:
 
                return revision
 

	
 
            # maybe it's a tag ? we don't have them in self.revisions
 
            if revision in self.tags.values():
 
                return revision
 

	
 
            if SHA_PATTERN.match(revision):
 
                msg = "Revision %r does not exist for %s" % (revision, self.name)
 
                raise ChangesetDoesNotExistError(msg)
 

	
 
        raise ChangesetDoesNotExistError("Given revision %r not recognized" % revision)
 

	
 
    def get_ref_revision(self, ref_type, ref_name):
 
        """
 
        Returns ``GitChangeset`` object representing repository's
 
        changeset at the given ``revision``.
 
        """
 
        return self._get_revision(ref_name)
 

	
 
    def _get_archives(self, archive_name='tip'):
 

	
 
        for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
 
            yield {"type": i[0], "extension": i[1], "node": archive_name}
 

	
 
    def _get_url(self, url):
 
        """
 
        Returns normalized url. If schema is not given, falls back to the
 
        filesystem (``file:///``) schema.
 
        """
 
        if url != 'default' and '://' not in url:
 
            url = ':///'.join(('file', url))
 
        return url
 

	
 
    @LazyProperty
 
    def name(self):
 
        return os.path.basename(self.path)
 

	
 
    @LazyProperty
 
    def last_change(self):
 
        """
 
        Returns last change made on this repository as datetime object
 
        """
 
        return date_fromtimestamp(self._get_mtime(), makedate()[1])
 

	
 
    def _get_mtime(self):
 
        try:
 
            return time.mktime(self.get_changeset().date.timetuple())
 
        except RepositoryError:
 
            idx_loc = '' if self.bare else '.git'
 
            # fallback to filesystem
 
            in_path = os.path.join(self.path, idx_loc, "index")
 
            he_path = os.path.join(self.path, idx_loc, "HEAD")
 
            if os.path.exists(in_path):
 
                return os.stat(in_path).st_mtime
 
            else:
 
                return os.stat(he_path).st_mtime
 

	
 
    @LazyProperty
 
    def description(self):
 
        return safe_str(self._repo.get_description() or b'unknown')
 

	
 
    @LazyProperty
 
    def contact(self):
 
        undefined_contact = 'Unknown'
 
        return undefined_contact
 

	
 
    @property
 
    def branches(self):
 
        if not self.revisions:
 
            return {}
 
        _branches = [(safe_str(key), ascii_str(sha))
 
                     for key, (sha, type_) in self._parsed_refs.items() if type_ == b'H']
 
        return OrderedDict(sorted(_branches, key=(lambda ctx: ctx[0]), reverse=False))
 

	
 
    @LazyProperty
 
    def closed_branches(self):
 
        return {}
 

	
 
    @LazyProperty
 
    def tags(self):
 
        return self._get_tags()
 

	
 
    def _get_tags(self):
 
        if not self.revisions:
 
            return {}
 
        _tags = [(safe_str(key), ascii_str(sha))
 
                 for key, (sha, type_) in self._parsed_refs.items() if type_ == b'T']
 
        return OrderedDict(sorted(_tags, key=(lambda ctx: ctx[0]), reverse=True))
 

	
 
    def tag(self, name, user, revision=None, message=None, date=None,
 
            **kwargs):
 
        """
 
        Creates and returns a tag for the given ``revision``.
 

	
 
        :param name: name for new tag
 
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
 
        :param revision: changeset id for which new tag would be created
 
        :param message: message of the tag's commit
 
        :param date: date of tag's commit
 

	
 
        :raises TagAlreadyExistError: if tag with same name already exists
 
        """
 
        if name in self.tags:
 
            raise TagAlreadyExistError("Tag %s already exists" % name)
 
        changeset = self.get_changeset(revision)
 
        message = message or "Added tag %s for commit %s" % (name,
 
            changeset.raw_id)
 
        self._repo.refs[b"refs/tags/%s" % safe_bytes(name)] = changeset._commit.id
 

	
 
        self._parsed_refs = self._get_parsed_refs()
 
        self.tags = self._get_tags()
 
        return changeset
 

	
 
    def remove_tag(self, name, user, message=None, date=None):
 
        """
 
        Removes tag with the given ``name``.
 

	
 
        :param name: name of the tag to be removed
 
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
 
        :param message: message of the tag's removal commit
 
        :param date: date of tag's removal commit
 

	
 
        :raises TagDoesNotExistError: if tag with given name does not exists
 
        """
 
        if name not in self.tags:
 
            raise TagDoesNotExistError("Tag %s does not exist" % name)
kallithea/lib/vcs/backends/hg/changeset.py
Show inline comments
 
import os
 
import posixpath
 

	
 
import mercurial.archival
 
import mercurial.node
 
import mercurial.obsutil
 

	
 
from kallithea.lib.vcs.backends.base import BaseChangeset
 
from kallithea.lib.vcs.conf import settings
 
from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, ImproperArchiveTypeError, NodeDoesNotExistError, VCSError
 
from kallithea.lib.vcs.nodes import (
 
    AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode, NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode)
 
from kallithea.lib.vcs.nodes import (AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode, NodeKind, RemovedFileNodesGenerator, RootNode,
 
                                     SubModuleNode)
 
from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, date_fromtimestamp, safe_bytes, safe_str
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.lib.vcs.utils.paths import get_dirs_for_path
 

	
 

	
 
class MercurialChangeset(BaseChangeset):
 
    """
 
    Represents state of the repository at a revision.
 
    """
 

	
 
    def __init__(self, repository, revision):
 
        self.repository = repository
 
        assert isinstance(revision, str), repr(revision)
 
        self._ctx = repository._repo[ascii_bytes(revision)]
 
        self.raw_id = ascii_str(self._ctx.hex())
 
        self.revision = self._ctx._rev
 
        self.nodes = {}
 

	
 
    @LazyProperty
 
    def tags(self):
 
        return [safe_str(tag) for tag in self._ctx.tags()]
 

	
 
    @LazyProperty
 
    def branch(self):
 
        return safe_str(self._ctx.branch())
 

	
 
    @LazyProperty
 
    def branches(self):
 
        return [safe_str(self._ctx.branch())]
 

	
 
    @LazyProperty
 
    def closesbranch(self):
 
        return self._ctx.closesbranch()
 

	
 
    @LazyProperty
 
    def obsolete(self):
 
        return self._ctx.obsolete()
 

	
 
    @LazyProperty
 
    def bumped(self):
 
        return self._ctx.phasedivergent()
 

	
 
    @LazyProperty
 
    def divergent(self):
 
        return self._ctx.contentdivergent()
 

	
 
    @LazyProperty
 
    def extinct(self):
 
        return self._ctx.extinct()
 

	
 
    @LazyProperty
 
    def unstable(self):
 
        return self._ctx.orphan()
 

	
 
    @LazyProperty
 
    def phase(self):
 
        if self._ctx.phase() == 1:
 
            return 'Draft'
 
        elif self._ctx.phase() == 2:
 
            return 'Secret'
 
        else:
 
            return ''
 

	
 
    @LazyProperty
 
    def successors(self):
 
        successors = mercurial.obsutil.successorssets(self._ctx._repo, self._ctx.node(), closest=True)
 
        if successors:
 
            # flatten the list here handles both divergent (len > 1)
 
            # and the usual case (len = 1)
 
            successors = [mercurial.node.hex(n)[:12] for sub in successors for n in sub if n != self._ctx.node()]
 

	
 
        return successors
 

	
 
    @LazyProperty
 
    def predecessors(self):
 
        return [mercurial.node.hex(n)[:12] for n in mercurial.obsutil.closestpredecessors(self._ctx._repo, self._ctx.node())]
 

	
 
    @LazyProperty
 
    def bookmarks(self):
 
        return [safe_str(bookmark) for bookmark in self._ctx.bookmarks()]
 

	
 
    @LazyProperty
 
    def message(self):
 
        return safe_str(self._ctx.description())
 

	
 
    @LazyProperty
 
    def committer(self):
 
        return safe_str(self.author)
 

	
 
    @LazyProperty
 
    def author(self):
 
        return safe_str(self._ctx.user())
 

	
 
    @LazyProperty
 
    def date(self):
 
        return date_fromtimestamp(*self._ctx.date())
 

	
 
    @LazyProperty
 
    def _timestamp(self):
 
        return self._ctx.date()[0]
 

	
 
    @LazyProperty
 
    def status(self):
 
        """
 
        Returns modified, added, removed, deleted files for current changeset
 
        """
 
        return self.repository._repo.status(self._ctx.p1().node(),
 
                                            self._ctx.node())
 

	
 
    @LazyProperty
 
    def _file_paths(self):
 
        return list(safe_str(f) for f in self._ctx)
 

	
 
    @LazyProperty
 
    def _dir_paths(self):
 
        p = list(set(get_dirs_for_path(*self._file_paths)))
 
        p.insert(0, '')
 
        return p
 

	
 
    @LazyProperty
 
    def _paths(self):
 
        return self._dir_paths + self._file_paths
 

	
 
    @LazyProperty
 
    def short_id(self):
 
        return self.raw_id[:12]
 

	
 
    @LazyProperty
 
    def parents(self):
 
        """
 
        Returns list of parent changesets.
 
        """
 
        return [self.repository.get_changeset(parent.rev())
 
                for parent in self._ctx.parents() if parent.rev() >= 0]
 

	
 
    @LazyProperty
 
    def children(self):
 
        """
 
        Returns list of children changesets.
 
        """
 
        return [self.repository.get_changeset(child.rev())
 
                for child in self._ctx.children() if child.rev() >= 0]
 

	
 
    def next(self, branch=None):
 
        if branch and self.branch != branch:
 
            raise VCSError('Branch option used on changeset not belonging '
 
                           'to that branch')
 

	
 
        cs = self
 
        while True:
 
            try:
 
                next_ = cs.repository.revisions.index(cs.raw_id) + 1
 
                next_rev = cs.repository.revisions[next_]
 
            except IndexError:
 
                raise ChangesetDoesNotExistError
 
            cs = cs.repository.get_changeset(next_rev)
 

	
 
            if not branch or branch == cs.branch:
 
                return cs
 

	
 
    def prev(self, branch=None):
 
        if branch and self.branch != branch:
 
            raise VCSError('Branch option used on changeset not belonging '
 
                           'to that branch')
 

	
 
        cs = self
 
        while True:
 
            try:
 
                prev_ = cs.repository.revisions.index(cs.raw_id) - 1
 
                if prev_ < 0:
 
                    raise IndexError
 
                prev_rev = cs.repository.revisions[prev_]
 
            except IndexError:
 
                raise ChangesetDoesNotExistError
 
            cs = cs.repository.get_changeset(prev_rev)
 

	
 
            if not branch or branch == cs.branch:
 
                return cs
 

	
 
    def diff(self):
 
        # Only used to feed diffstat
 
        return b''.join(self._ctx.diff())
 

	
 
    def _get_kind(self, path):
 
        path = path.rstrip('/')
 
        if path in self._file_paths:
 
            return NodeKind.FILE
 
        elif path in self._dir_paths:
 
            return NodeKind.DIR
 
        else:
 
            raise ChangesetError("Node does not exist at the given path '%s'"
 
                % (path))
 

	
 
    def _get_filectx(self, path):
 
        path = path.rstrip('/')
 
        if self._get_kind(path) != NodeKind.FILE:
 
            raise ChangesetError("File does not exist for revision %s at "
 
                " '%s'" % (self.raw_id, path))
 
        return self._ctx.filectx(safe_bytes(path))
 

	
 
    def _extract_submodules(self):
 
        """
 
        returns a dictionary with submodule information from substate file
 
        of hg repository
 
        """
 
        return self._ctx.substate
 

	
 
    def get_file_mode(self, path):
 
        """
 
        Returns stat mode of the file at the given ``path``.
 
        """
 
        fctx = self._get_filectx(path)
 
        if b'x' in fctx.flags():
 
            return 0o100755
 
        else:
 
            return 0o100644
 

	
 
    def get_file_content(self, path):
 
        """
 
        Returns content of the file at given ``path``.
 
        """
 
        fctx = self._get_filectx(path)
 
        return fctx.data()
 

	
 
    def get_file_size(self, path):
 
        """
 
        Returns size of the file at given ``path``.
 
        """
 
        fctx = self._get_filectx(path)
 
        return fctx.size()
 

	
 
    def get_file_changeset(self, path):
 
        """
 
        Returns last commit of the file at the given ``path``.
 
        """
 
        return self.get_file_history(path, limit=1)[0]
 

	
 
    def get_file_history(self, path, limit=None):
 
        """
 
        Returns history of file as reversed list of ``Changeset`` objects for
 
        which file at given ``path`` has been modified.
 
        """
 
        fctx = self._get_filectx(path)
 
        hist = []
 
        cnt = 0
 
        for cs in reversed([x for x in fctx.filelog()]):
 
            cnt += 1
 
            hist.append(mercurial.node.hex(fctx.filectx(cs).node()))
 
            if limit is not None and cnt == limit:
 
                break
 

	
 
        return [self.repository.get_changeset(node) for node in hist]
 

	
 
    def get_file_annotate(self, path):
 
        """
 
        Returns a generator of four element tuples with
 
            lineno, sha, changeset lazy loader and line
 
        """
 
        annotations = self._get_filectx(path).annotate()
 
        annotation_lines = [(annotateline.fctx, annotateline.text) for annotateline in annotations]
 
        for i, (fctx, line) in enumerate(annotation_lines):
 
            sha = ascii_str(fctx.hex())
 
            yield (i + 1, sha, lambda sha=sha: self.repository.get_changeset(sha), line)
 

	
 
    def fill_archive(self, stream=None, kind='tgz', prefix=None,
 
                     subrepos=False):
 
        """
 
        Fills up given stream.
 

	
 
        :param stream: file like object.
 
        :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``.
 
            Default: ``tgz``.
 
        :param prefix: name of root directory in archive.
 
            Default is repository name and changeset's raw_id joined with dash
 
            (``repo-tip.<KIND>``).
 
        :param subrepos: include subrepos in this archive.
 

	
 
        :raise ImproperArchiveTypeError: If given kind is wrong.
 
        :raise VcsError: If given stream is None
 
        """
 
        allowed_kinds = settings.ARCHIVE_SPECS
 
        if kind not in allowed_kinds:
 
            raise ImproperArchiveTypeError('Archive kind not supported, use one '
 
                'of %s' % ' '.join(allowed_kinds))
 

	
 
        if stream is None:
 
            raise VCSError('You need to pass in a valid stream for filling'
 
                           ' with archival data')
 

	
 
        if prefix is None:
 
            prefix = '%s-%s' % (self.repository.name, self.short_id)
 
        elif prefix.startswith('/'):
 
            raise VCSError("Prefix cannot start with leading slash")
 
        elif prefix.strip() == '':
 
            raise VCSError("Prefix cannot be empty")
 

	
 
        mercurial.archival.archive(self.repository._repo, stream, ascii_bytes(self.raw_id),
 
                         safe_bytes(kind), prefix=safe_bytes(prefix), subrepos=subrepos)
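    # Editor's note: minimal sketch of filling an archive stream, not part of
    # this changeset; `cs` is an assumed MercurialChangeset instance:
    #
    #     with open('/tmp/repo-tip.tar.gz', 'wb') as stream:
    #         cs.fill_archive(stream=stream, kind='tgz')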
 

	
 
    def get_nodes(self, path):
 
        """
 
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
 
        state of changeset at the given ``path``. If node at the given ``path``
 
        is not an instance of ``DirNode``, ChangesetError would be raised.
 
        """
 

	
 
        if self._get_kind(path) != NodeKind.DIR:
 
            raise ChangesetError("Directory does not exist for revision %s at "
 
                " '%s'" % (self.revision, path))
 
        path = path.rstrip('/')
 
        filenodes = [FileNode(f, changeset=self) for f in self._file_paths
 
            if os.path.dirname(f) == path]
 
        dirs = path == '' and '' or [d for d in self._dir_paths
 
            if d and posixpath.dirname(d) == path]
 
        dirnodes = [DirNode(d, changeset=self) for d in dirs
 
            if os.path.dirname(d) == path]
 

	
 
        als = self.repository.alias
 
        for k, vals in self._extract_submodules().items():
 
            #vals = url,rev,type
 
            loc = vals[0]
 
            cs = vals[1]
 
            dirnodes.append(SubModuleNode(k, url=loc, changeset=cs,
 
                                          alias=als))
 
        nodes = dirnodes + filenodes
 
        for node in nodes:
 
            self.nodes[node.path] = node
 
        nodes.sort()
 
        return nodes
 

	
 
    def get_node(self, path):
 
        """
 
        Returns ``Node`` object from the given ``path``. If there is no node at
 
        the given ``path``, ``ChangesetError`` would be raised.
 
        """
 
        path = path.rstrip('/')
 
        if path not in self.nodes:
 
            if path in self._file_paths:
 
                node = FileNode(path, changeset=self)
 
            elif path in self._dir_paths:
 
                if path == '':
 
                    node = RootNode(changeset=self)
 
                else:
 
                    node = DirNode(path, changeset=self)
 
            else:
 
                raise NodeDoesNotExistError("There is no file nor directory "
 
                    "at the given path: '%s' at revision %s"
 
                    % (path, self.short_id))
 
            # cache node
 
            self.nodes[path] = node
 
        return self.nodes[path]
 

	
 
    @LazyProperty
 
    def affected_files(self):
 
        """
 
        Gets a fast-accessible list of file changes for the given changeset
 
        """
 
        return self._ctx.files()
 

	
 
    @property
 
    def added(self):
 
        """
 
        Returns list of added ``FileNode`` objects.
 
        """
 
        return AddedFileNodesGenerator([safe_str(n) for n in self.status.added], self)
 

	
 
    @property
 
    def changed(self):
 
        """
 
        Returns list of modified ``FileNode`` objects.
 
        """
 
        return ChangedFileNodesGenerator([safe_str(n) for n in self.status.modified], self)
 

	
 
    @property
 
    def removed(self):
 
        """
 
        Returns list of removed ``FileNode`` objects.
 
        """
 
        return RemovedFileNodesGenerator([safe_str(n) for n in self.status.removed], self)
 

	
 
    @LazyProperty
 
    def extra(self):
 
        return self._ctx.extra()
kallithea/lib/vcs/backends/hg/repository.py
 
# -*- coding: utf-8 -*-
 
"""
 
    vcs.backends.hg.repository
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    Mercurial repository implementation.
 

	
 
    :created_on: Apr 8, 2010
 
    :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
 
"""
 

	
 
import datetime
 
import logging
 
import os
 
import time
 
import urllib.error
 
import urllib.parse
 
import urllib.request
 
from collections import OrderedDict
 

	
 
import mercurial.commands
 
import mercurial.error
 
import mercurial.exchange
 
import mercurial.hg
 
import mercurial.hgweb
 
import mercurial.httppeer
 
import mercurial.localrepo
 
import mercurial.match
 
import mercurial.mdiff
 
import mercurial.node
 
import mercurial.patch
 
import mercurial.scmutil
 
import mercurial.sshpeer
 
import mercurial.tags
 
import mercurial.ui
 
import mercurial.url
 
import mercurial.util
 

	
 
from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator
 
from kallithea.lib.vcs.exceptions import (
 
    BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
 
from kallithea.lib.vcs.exceptions import (BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
 
                                          TagDoesNotExistError, VCSError)
 
from kallithea.lib.vcs.utils import ascii_str, author_email, author_name, date_fromtimestamp, makedate, safe_bytes, safe_str
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.lib.vcs.utils.paths import abspath
 

	
 
from .changeset import MercurialChangeset
 
from .inmemory import MercurialInMemoryChangeset
 
from .workdir import MercurialWorkdir
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class MercurialRepository(BaseRepository):
 
    """
 
    Mercurial repository backend
 
    """
 
    DEFAULT_BRANCH_NAME = 'default'
 
    scm = 'hg'
 

	
 
    def __init__(self, repo_path, create=False, baseui=None, src_url=None,
 
                 update_after_clone=False):
 
        """
 
        Raises RepositoryError if repository could not be found at the given
 
        ``repo_path``.
 

	
 
        :param repo_path: local path of the repository
 
        :param create=False: if set to True, would try to create repository if
 
           it does not exist rather than raising exception
 
        :param baseui=None: user data
 
        :param src_url=None: would try to clone repository from given location
 
        :param update_after_clone=False: sets update of working copy after
 
          making a clone
 
        """
 

	
 
        if not isinstance(repo_path, str):
 
            raise VCSError('Mercurial backend requires repository path to '
 
                           'be instance of <str> got %s instead' %
 
                           type(repo_path))
 
        self.path = abspath(repo_path)
 
        self.baseui = baseui or mercurial.ui.ui()
 
        # We've set path and ui, now we can set _repo itself
 
        self._repo = self._get_repo(create, src_url, update_after_clone)
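    # Illustrative usage sketch (not part of this changeset): opening an
    # existing repository, or cloning from a source URL into a new local path
    # (the paths and URL below are placeholders):
    #
    #     repo = MercurialRepository('/srv/repos/example')
    #     clone = MercurialRepository('/srv/repos/example-copy', create=True,
    #                                 src_url='https://example.com/hg/example',
    #                                 update_after_clone=True)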
 

	
 
    @property
 
    def _empty(self):
 
        """
 
        Checks if repository is empty, i.e. without any changesets
 
        """
 
        # TODO: Following raises errors when using InMemoryChangeset...
 
        # return len(self._repo.changelog) == 0
 
        return len(self.revisions) == 0
 

	
 
    @LazyProperty
 
    def revisions(self):
 
        """
 
        Returns list of revisions' ids, in ascending order.  Being a lazy
 
        attribute allows external tools to inject shas from cache.
 
        """
 
        return self._get_all_revisions()
 

	
 
    @LazyProperty
 
    def name(self):
 
        return os.path.basename(self.path)
 

	
 
    @LazyProperty
 
    def branches(self):
 
        return self._get_branches()
 

	
 
    @LazyProperty
 
    def closed_branches(self):
 
        return self._get_branches(normal=False, closed=True)
 

	
 
    @LazyProperty
 
    def allbranches(self):
 
        """
 
        List all branches, including closed branches.
 
        """
 
        return self._get_branches(closed=True)
 

	
 
    def _get_branches(self, normal=True, closed=False):
 
        """
 
        Gets branches for this repository
 
        Returns only open (not closed) branches by default
 

	
 
        :param closed: return also closed branches for mercurial
 
        :param normal: return also normal branches
 
        """
 

	
 
        if self._empty:
 
            return {}
 

	
 
        bt = OrderedDict()
 
        for bn, _heads, node, isclosed in sorted(self._repo.branchmap().iterbranches()):
 
            if isclosed:
 
                if closed:
 
                    bt[safe_str(bn)] = ascii_str(mercurial.node.hex(node))
 
            else:
 
                if normal:
 
                    bt[safe_str(bn)] = ascii_str(mercurial.node.hex(node))
 
        return bt
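    # Illustrative usage sketch (not part of this changeset): the branch
    # properties above each return an ordered mapping of branch name to
    # changeset hash:
    #
    #     repo.branches          # open branches only
    #     repo.closed_branches   # closed branches only
    #     repo.allbranches       # both open and closed branches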
 

	
 
    @LazyProperty
 
    def tags(self):
 
        """
 
        Gets tags for this repository
 
        """
 
        return self._get_tags()
 

	
 
    def _get_tags(self):
 
        if self._empty:
 
            return {}
 

	
 
        return OrderedDict(sorted(
 
            ((safe_str(n), ascii_str(mercurial.node.hex(h))) for n, h in self._repo.tags().items()),
 
            reverse=True,
 
            key=lambda x: x[0],  # sort by name
 
        ))
 

	
 
    def tag(self, name, user, revision=None, message=None, date=None,
 
            **kwargs):
 
        """
 
        Creates and returns a tag for the given ``revision``.
 

	
 
        :param name: name for new tag
 
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
 
        :param revision: changeset id for which new tag would be created
 
        :param message: message of the tag's commit
 
        :param date: date of tag's commit
 

	
 
        :raises TagAlreadyExistError: if tag with same name already exists
 
        """
 
        if name in self.tags:
 
            raise TagAlreadyExistError("Tag %s already exists" % name)
 
        changeset = self.get_changeset(revision)
 
        local = kwargs.setdefault('local', False)
 

	
 
        if message is None:
 
            message = "Added tag %s for changeset %s" % (name,
 
                changeset.short_id)
 

	
 
        if date is None:
 
            date = safe_bytes(datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S'))
 

	
 
        try:
 
            mercurial.tags.tag(self._repo, safe_bytes(name), changeset._ctx.node(), safe_bytes(message), local, safe_bytes(user), date)
 
        except mercurial.error.Abort as e:
 
            raise RepositoryError(e.args[0])
 

	
 
        # Reinitialize tags
 
        self.tags = self._get_tags()
 
        tag_id = self.tags[name]
 

	
 
        return self.get_changeset(revision=tag_id)
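    # Illustrative usage sketch (not part of this changeset): tagging a
    # revision and reading the tag back; the tag name, user and revision are
    # placeholders:
    #
    #     cs = repo.tag('v1.0', 'Joe Doe <joe.doe@example.com>', revision='tip')
    #     assert repo.tags['v1.0'] == cs.raw_id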
 

	
 
    def remove_tag(self, name, user, message=None, date=None):
 
        """
 
        Removes tag with the given ``name``.
 

	
 
        :param name: name of the tag to be removed
 
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
 
        :param message: message of the tag's removal commit
 
        :param date: date of tag's removal commit
 

	
 
        :raises TagDoesNotExistError: if tag with given name does not exist
 
        """
 
        if name not in self.tags:
 
            raise TagDoesNotExistError("Tag %s does not exist" % name)
 
        if message is None:
 
            message = "Removed tag %s" % name
 
        if date is None:
 
            date = safe_bytes(datetime.datetime.now().strftime('%a, %d %b %Y %H:%M:%S'))
 
        local = False
 

	
 
        try:
 
            mercurial.tags.tag(self._repo, safe_bytes(name), mercurial.commands.nullid, safe_bytes(message), local, safe_bytes(user), date)
 
            self.tags = self._get_tags()
 
        except mercurial.error.Abort as e:
 
            raise RepositoryError(e.args[0])
 

	
 
    @LazyProperty
 
    def bookmarks(self):
 
        """
 
        Gets bookmarks for this repository
 
        """
 
        return self._get_bookmarks()
 

	
 
    def _get_bookmarks(self):
 
        if self._empty:
 
            return {}
 

	
 
        return OrderedDict(sorted(
 
            ((safe_str(n), ascii_str(h)) for n, h in self._repo._bookmarks.items()),
 
            reverse=True,
 
            key=lambda x: x[0],  # sort by name
 
        ))
 

	
 
    def _get_all_revisions(self):
 
        return [ascii_str(self._repo[x].hex()) for x in self._repo.filtered(b'visible').changelog.revs()]
 

	
 
    def get_diff(self, rev1, rev2, path='', ignore_whitespace=False,
 
                  context=3):
 
        """
 
        Returns (git like) *diff*, as plain text. Shows changes introduced by
 
        ``rev2`` since ``rev1``.
 

	
 
        :param rev1: Entry point from which diff is shown. Can be
 
          ``self.EMPTY_CHANGESET`` - in this case, patch showing all
 
          the changes since empty state of the repository until ``rev2``
 
        :param rev2: Until which revision changes should be shown.
 
        :param ignore_whitespace: If set to ``True``, would not show whitespace
 
          changes. Defaults to ``False``.
 
        :param context: How many lines before/after changed lines should be
 
          shown. Defaults to ``3``. If negative value is passed-in, it will be
 
          set to ``0`` instead.
 
        """
 

	
 
        # Negative context values make no sense, and will result in
 
        # errors. Ensure this does not happen.
 
        if context < 0:
 
            context = 0
 

	
 
        if hasattr(rev1, 'raw_id'):
 
            rev1 = getattr(rev1, 'raw_id')
 

	
 
        if hasattr(rev2, 'raw_id'):
 
            rev2 = getattr(rev2, 'raw_id')
 

	
 
        # Check if given revisions are present at repository (may raise
 
        # ChangesetDoesNotExistError)
 
        if rev1 != self.EMPTY_CHANGESET:
 
            self.get_changeset(rev1)
 
        self.get_changeset(rev2)
 
        if path:
 
            file_filter = mercurial.match.exact(path)
 
        else:
 
            file_filter = None
 

	
 
        return b''.join(mercurial.patch.diff(self._repo, rev1, rev2, match=file_filter,
 
                          opts=mercurial.mdiff.diffopts(git=True,
 
                                        showfunc=True,
 
                                        ignorews=ignore_whitespace,
 
                                        context=context)))
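    # Illustrative usage sketch (not part of this changeset): diffing the
    # first and last known revisions, optionally restricted to a single file
    # (the path is a placeholder); the result is bytes in git-style unified
    # diff format:
    #
    #     old, new = repo.revisions[0], repo.revisions[-1]
    #     raw_diff = repo.get_diff(old, new, path='setup.py', context=5)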
 

	
 
    @classmethod
 
    def _check_url(cls, url, repoui=None):
 
        """
 
        Function will check given url and try to verify if it's a valid
 
        link. Sometimes it may happen that mercurial will issue a basic
 
        auth request that can cause whole API to hang when used from python
 
        or other external calls.
 

	
 
        On failures it'll raise urllib.error.URLError; an exception is also
 
        raised when the return code is not 200
 
        """
 
        # check first if it's not a local url
 
        url = safe_bytes(url)
 
        if os.path.isdir(url) or url.startswith(b'file:'):
 
            return True
 

	
 
        if url.startswith(b'ssh:'):
 
            # in case of invalid uri or authentication issues, sshpeer will
 
            # throw an exception.
 
            mercurial.sshpeer.instance(repoui or mercurial.ui.ui(), url, False).lookup(b'tip')
 
            return True
 

	
 
        url_prefix = None
 
        if b'+' in url[:url.find(b'://')]:
 
            url_prefix, url = url.split(b'+', 1)
 

	
 
        handlers = []
 
        url_obj = mercurial.util.url(url)
 
        test_uri, authinfo = url_obj.authinfo()
 
        url_obj.passwd = b'*****'
 
        cleaned_uri = str(url_obj)
 

	
 
        if authinfo:
 
            # create a password manager
 
            passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
 
            passmgr.add_password(*authinfo)
 

	
 
            handlers.extend((mercurial.url.httpbasicauthhandler(passmgr),
 
                             mercurial.url.httpdigestauthhandler(passmgr)))
 

	
 
        o = urllib.request.build_opener(*handlers)
 
        o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
 
                        ('Accept', 'application/mercurial-0.1')]
 

	
 
        req = urllib.request.Request(
 
            "%s?%s" % (
 
                test_uri,
 
                urllib.parse.urlencode({
 
                    'cmd': 'between',
 
                    'pairs': "%s-%s" % ('0' * 40, '0' * 40),
 
                })
 
            ))
 

	
 
        try:
 
            resp = o.open(req)
 
            if resp.code != 200:
 
                raise Exception('Return Code is not 200')
 
        except Exception as e:
 
            # means it cannot be cloned
 
            raise urllib.error.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
 

	
 
        if not url_prefix: # skip svn+http://... (and git+... too)
 
            # now check if it's a proper hg repo
 
            try:
 
                mercurial.httppeer.instance(repoui or mercurial.ui.ui(), url, False).lookup(b'tip')
 
            except Exception as e:
 
                raise urllib.error.URLError(
 
                    "url [%s] does not look like an hg repo org_exc: %s"
 
                    % (cleaned_uri, e))
 

	
 
        return True
 

	
 
    def _get_repo(self, create, src_url=None, update_after_clone=False):
 
        """
 
        Function will check for mercurial repository in given path and return
 
        a localrepo object. If there is no repository in that path it will
 
        raise an exception unless ``create`` parameter is set to True - in
 
        that case repository would be created and returned.
 
        If ``src_url`` is given, would try to clone repository from the
 
        location at given clone_point. Additionally it'll make update to
 
        working copy according to the ``update_after_clone`` flag
 
        """
 
        try:
 
            if src_url:
 
                url = safe_bytes(self._get_url(src_url))
 
                opts = {}
 
                if not update_after_clone:
 
                    opts.update({'noupdate': True})
 
                MercurialRepository._check_url(url, self.baseui)
 
                mercurial.commands.clone(self.baseui, url, safe_bytes(self.path), **opts)
 

	
 
                # Don't try to create if we've already cloned repo
 
                create = False
 
            return mercurial.localrepo.instance(self.baseui, safe_bytes(self.path), create=create)
 
        except (mercurial.error.Abort, mercurial.error.RepoError) as err:
 
            if create:
 
                msg = "Cannot create repository at %s. Original error was %s" \
 
                    % (self.name, err)
 
            else:
 
                msg = "Not valid repository at %s. Original error was %s" \
 
                    % (self.name, err)
 
            raise RepositoryError(msg)
 

	
 
    @LazyProperty
 
    def in_memory_changeset(self):
 
        return MercurialInMemoryChangeset(self)
 

	
 
    @LazyProperty
 
    def description(self):
 
        _desc = self._repo.ui.config(b'web', b'description', None, untrusted=True)
 
        return safe_str(_desc or b'unknown')
 

	
 
    @LazyProperty
 
    def contact(self):
 
        return safe_str(mercurial.hgweb.common.get_contact(self._repo.ui.config)
 
                            or b'Unknown')
 

	
 
    @LazyProperty
 
    def last_change(self):
 
        """
 
        Returns last change made on this repository as datetime object
 
        """
 
        return date_fromtimestamp(self._get_mtime(), makedate()[1])
 

	
 
    def _get_mtime(self):
 
        try:
 
            return time.mktime(self.get_changeset().date.timetuple())
 
        except RepositoryError:
 
            # fallback to filesystem
 
            cl_path = os.path.join(self.path, '.hg', "00changelog.i")
 
            st_path = os.path.join(self.path, '.hg', "store")
 
            if os.path.exists(cl_path):
 
                return os.stat(cl_path).st_mtime
 
            else:
 
                return os.stat(st_path).st_mtime
 

	
 
    def _get_revision(self, revision):
 
        """
 
        Given any revision identifier, returns a 40 char string with revision hash.
 

	
 
        :param revision: str or int or None
kallithea/model/db.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.db
 
~~~~~~~~~~~~~~~~~~
 

	
 
Database Models for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 08, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import base64
 
import collections
 
import datetime
 
import functools
 
import hashlib
 
import logging
 
import os
 
import time
 
import traceback
 

	
 
import ipaddr
 
import sqlalchemy
 
from beaker.cache import cache_region, region_invalidate
 
from sqlalchemy import Boolean, Column, DateTime, Float, ForeignKey, Index, Integer, LargeBinary, String, Unicode, UnicodeText, UniqueConstraint
 
from sqlalchemy.ext.hybrid import hybrid_property
 
from sqlalchemy.orm import class_mapper, joinedload, relationship, validates
 
from tg.i18n import lazy_ugettext as _
 
from webob.exc import HTTPNotFound
 

	
 
import kallithea
 
from kallithea.lib import ext_json
 
from kallithea.lib.caching_query import FromCache
 
from kallithea.lib.exceptions import DefaultUserException
 
from kallithea.lib.utils2 import (
 
    Optional, ascii_bytes, aslist, get_changeset_safe, get_clone_url, remove_prefix, safe_bytes, safe_int, safe_str, str2bool, urlreadable)
 
from kallithea.lib.utils2 import (Optional, ascii_bytes, aslist, get_changeset_safe, get_clone_url, remove_prefix, safe_bytes, safe_int, safe_str, str2bool,
 
                                  urlreadable)
 
from kallithea.lib.vcs import get_backend
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 
from kallithea.lib.vcs.utils.helpers import get_scm
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.model.meta import Base, Session
 

	
 

	
 
URL_SEP = '/'
 
log = logging.getLogger(__name__)
 

	
 
#==============================================================================
 
# BASE CLASSES
 
#==============================================================================
 

	
 
def _hash_key(k):
 
    return hashlib.md5(safe_bytes(k)).hexdigest()
 

	
 

	
 
class BaseDbModel(object):
 
    """
 
    Base Model for all classes
 
    """
 

	
 
    @classmethod
 
    def _get_keys(cls):
 
        """return column names for this model """
 
        # Note: not a normal dict - iterator gives "users.firstname", but keys gives "firstname"
 
        return class_mapper(cls).c.keys()
 

	
 
    def get_dict(self):
 
        """
 
        return dict with keys and values corresponding
 
        to this model data """
 

	
 
        d = {}
 
        for k in self._get_keys():
 
            d[k] = getattr(self, k)
 

	
 
        # also use __json__() if present to get additional fields
 
        _json_attr = getattr(self, '__json__', None)
 
        if _json_attr:
 
            # update with attributes from __json__
 
            if callable(_json_attr):
 
                _json_attr = _json_attr()
 
            for k, val in _json_attr.items():
 
                d[k] = val
 
        return d
 

	
 
    def get_appstruct(self):
 
        """return list with keys and values tuples corresponding
 
        to this model data """
 

	
 
        return [
 
            (k, getattr(self, k))
 
            for k in self._get_keys()
 
        ]
 

	
 
    def populate_obj(self, populate_dict):
 
        """populate model with data from given populate_dict"""
 

	
 
        for k in self._get_keys():
 
            if k in populate_dict:
 
                setattr(self, k, populate_dict[k])
 

	
 
    @classmethod
 
    def query(cls):
 
        return Session().query(cls)
 

	
 
    @classmethod
 
    def get(cls, id_):
 
        if id_:
 
            return cls.query().get(id_)
 

	
 
    @classmethod
 
    def guess_instance(cls, value, callback=None):
 
        """Haphazardly attempt to convert `value` to a `cls` instance.
 

	
 
        If `value` is None or already a `cls` instance, return it. If `value`
 
        is a number (or looks like one if you squint just right), assume it's
 
        a database primary key and let SQLAlchemy sort things out. Otherwise,
 
        fall back to resolving it using `callback` (if specified); this could
 
        e.g. be a function that looks up instances by name (though that won't
 
        work if the name begins with a digit). Otherwise, raise Exception.
 
        """
 

	
 
        if value is None:
 
            return None
 
        if isinstance(value, cls):
 
            return value
 
        if isinstance(value, int):
 
            return cls.get(value)
 
        if isinstance(value, str) and value.isdigit():
 
            return cls.get(int(value))
 
        if callback is not None:
 
            return callback(value)
 

	
 
        raise Exception(
 
            'given object must be int, long or Instance of %s '
 
            'got %s, no callback provided' % (cls, type(value))
 
        )
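    # Illustrative usage sketch (not part of this changeset): assuming a user
    # with id 1 exists, the following all resolve to the same User row; the
    # callback form is needed for non-numeric identifiers:
    #
    #     User.guess_instance(1)
    #     User.guess_instance('1')
    #     User.guess_instance('john', callback=User.get_by_username)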
 

	
 
    @classmethod
 
    def get_or_404(cls, id_):
 
        try:
 
            id_ = int(id_)
 
        except (TypeError, ValueError):
 
            raise HTTPNotFound
 

	
 
        res = cls.query().get(id_)
 
        if res is None:
 
            raise HTTPNotFound
 
        return res
 

	
 
    @classmethod
 
    def delete(cls, id_):
 
        obj = cls.query().get(id_)
 
        Session().delete(obj)
 

	
 
    def __repr__(self):
 
        return '<DB:%s>' % (self.__class__.__name__)
 

	
 

	
 
_table_args_default_dict = {'extend_existing': True,
 
                            'mysql_engine': 'InnoDB',
 
                            'mysql_charset': 'utf8',
 
                            'sqlite_autoincrement': True,
 
                           }
 

	
 
class Setting(Base, BaseDbModel):
 
    __tablename__ = 'settings'
 
    __table_args__ = (
 
        _table_args_default_dict,
 
    )
 

	
 
    SETTINGS_TYPES = {
 
        'str': safe_bytes,
 
        'int': safe_int,
 
        'unicode': safe_str,
 
        'bool': str2bool,
 
        'list': functools.partial(aslist, sep=',')
 
    }
 
    DEFAULT_UPDATE_URL = ''
 

	
 
    app_settings_id = Column(Integer(), primary_key=True)
 
    app_settings_name = Column(String(255), nullable=False, unique=True)
 
    _app_settings_value = Column("app_settings_value", Unicode(4096), nullable=False)
 
    _app_settings_type = Column("app_settings_type", String(255), nullable=True) # FIXME: not nullable?
 

	
 
    def __init__(self, key='', val='', type='unicode'):
 
        self.app_settings_name = key
 
        self.app_settings_value = val
 
        self.app_settings_type = type
 

	
 
    @validates('_app_settings_value')
 
    def validate_settings_value(self, key, val):
 
        assert isinstance(val, str)
 
        return val
 

	
 
    @hybrid_property
 
    def app_settings_value(self):
 
        v = self._app_settings_value
 
        _type = self.app_settings_type
 
        converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
 
        return converter(v)
 

	
 
    @app_settings_value.setter
 
    def app_settings_value(self, val):
 
        """
 
        Setter that will always make sure we use str in app_settings_value
 
        """
 
        self._app_settings_value = safe_str(val)
 

	
 
    @hybrid_property
 
    def app_settings_type(self):
 
        return self._app_settings_type
 

	
 
    @app_settings_type.setter
 
    def app_settings_type(self, val):
 
        if val not in self.SETTINGS_TYPES:
 
            raise Exception('type must be one of %s got %s'
 
                            % (list(self.SETTINGS_TYPES), val))
 
        self._app_settings_type = val
 

	
 
    def __repr__(self):
 
        return "<%s %s.%s=%r>" % (
 
            self.__class__.__name__,
 
            self.app_settings_name, self.app_settings_type, self.app_settings_value
 
        )
 

	
 
    @classmethod
 
    def get_by_name(cls, key):
 
        return cls.query() \
 
            .filter(cls.app_settings_name == key).scalar()
 

	
 
    @classmethod
 
    def get_by_name_or_create(cls, key, val='', type='unicode'):
 
        res = cls.get_by_name(key)
 
        if res is None:
 
            res = cls(key, val, type)
 
        return res
 

	
 
    @classmethod
 
    def create_or_update(cls, key, val=Optional(''), type=Optional('unicode')):
 
        """
 
        Creates or updates Kallithea setting. If updates are triggered, it will only
 
        update parameters that are explicitly set; Optional instances will be skipped.
 

	
 
        :param key:
 
        :param val:
 
        :param type:
 
        :return:
 
        """
 
        res = cls.get_by_name(key)
 
        if res is None:
 
            val = Optional.extract(val)
 
            type = Optional.extract(type)
 
            res = cls(key, val, type)
 
            Session().add(res)
 
        else:
 
            res.app_settings_name = key
 
            if not isinstance(val, Optional):
 
                # update if set
 
                res.app_settings_value = val
 
            if not isinstance(type, Optional):
 
                # update if set
 
                res.app_settings_type = type
 
        return res
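    # Illustrative usage sketch (not part of this changeset): creating a
    # setting and later updating only its value; the key and values are
    # placeholders:
    #
    #     Setting.create_or_update('title', 'My Kallithea', type='unicode')
    #     Setting.create_or_update('title', 'Renamed instance')  # type untouched
    #     Session().commit()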
 

	
 
    @classmethod
 
    def get_app_settings(cls, cache=False):
 

	
 
        ret = cls.query()
 

	
 
        if cache:
 
            ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
 

	
 
        if ret is None:
 
            raise Exception('Could not get application settings !')
 
        settings = {}
 
        for each in ret:
 
            settings[each.app_settings_name] = \
 
                each.app_settings_value
 

	
 
        return settings
 

	
 
    @classmethod
 
    def get_auth_settings(cls, cache=False):
 
        ret = cls.query() \
 
                .filter(cls.app_settings_name.startswith('auth_')).all()
 
        fd = {}
 
        for row in ret:
 
            fd[row.app_settings_name] = row.app_settings_value
 
        return fd
 

	
 
    @classmethod
 
    def get_default_repo_settings(cls, cache=False, strip_prefix=False):
 
        ret = cls.query() \
 
                .filter(cls.app_settings_name.startswith('default_')).all()
 
        fd = {}
 
        for row in ret:
 
            key = row.app_settings_name
 
            if strip_prefix:
 
                key = remove_prefix(key, prefix='default_')
 
            fd.update({key: row.app_settings_value})
 

	
 
        return fd
 

	
 
    @classmethod
 
    def get_server_info(cls):
 
        import pkg_resources
 
        import platform
 
        from kallithea.lib.utils import check_git_version
 
        mods = [(p.project_name, p.version) for p in pkg_resources.working_set]
 
        info = {
 
            'modules': sorted(mods, key=lambda k: k[0].lower()),
 
            'py_version': platform.python_version(),
 
            'platform': platform.platform(),
 
            'kallithea_version': kallithea.__version__,
 
            'git_version': str(check_git_version()),
 
            'git_path': kallithea.CONFIG.get('git_path')
 
        }
 
        return info
 

	
 

	
 
class Ui(Base, BaseDbModel):
 
    __tablename__ = 'ui'
 
    __table_args__ = (
 
        # FIXME: ui_key as key is wrong and should be removed when the corresponding
 
        # Ui.get_by_key has been replaced by the composite key
 
        UniqueConstraint('ui_key'),
 
        UniqueConstraint('ui_section', 'ui_key'),
 
        _table_args_default_dict,
 
    )
 

	
 
    HOOK_UPDATE = 'changegroup.update'
 
    HOOK_REPO_SIZE = 'changegroup.repo_size'
 

	
 
    ui_id = Column(Integer(), primary_key=True)
 
    ui_section = Column(String(255), nullable=False)
 
    ui_key = Column(String(255), nullable=False)
 
    ui_value = Column(String(255), nullable=True) # FIXME: not nullable?
 
    ui_active = Column(Boolean(), nullable=False, default=True)
 

	
 
    @classmethod
 
    def get_by_key(cls, section, key):
 
        """ Return specified Ui object, or None if not found. """
 
        return cls.query().filter_by(ui_section=section, ui_key=key).scalar()
 

	
 
    @classmethod
 
    def get_or_create(cls, section, key):
 
        """ Return specified Ui object, creating it if necessary. """
 
        setting = cls.get_by_key(section, key)
 
        if setting is None:
 
            setting = cls(ui_section=section, ui_key=key)
 
            Session().add(setting)
 
        return setting
 

	
 
    @classmethod
 
    def get_builtin_hooks(cls):
 
        q = cls.query()
 
        q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE]))
 
        q = q.filter(cls.ui_section == 'hooks')
 
        return q.all()
 

	
 
    @classmethod
 
    def get_custom_hooks(cls):
 
        q = cls.query()
 
        q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE]))
 
        q = q.filter(cls.ui_section == 'hooks')
 
        return q.all()
 

	
 
    @classmethod
 
    def get_repos_location(cls):
 
        return cls.get_by_key('paths', '/').ui_value
 

	
 
    @classmethod
 
    def create_or_update_hook(cls, key, val):
 
        new_ui = cls.get_or_create('hooks', key)
 
        new_ui.ui_active = True
 
        new_ui.ui_value = val
 

	
 
    def __repr__(self):
 
        return '<%s %s.%s=%r>' % (
 
            self.__class__.__name__,
 
            self.ui_section, self.ui_key, self.ui_value)
 

	
 

	
 
class User(Base, BaseDbModel):
 
    __tablename__ = 'users'
 
    __table_args__ = (
 
        Index('u_username_idx', 'username'),
 
        Index('u_email_idx', 'email'),
 
        _table_args_default_dict,
 
    )
 

	
 
    DEFAULT_USER = 'default'
 
    DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
 
    # The name of the default auth type in extern_type, 'internal' lives in auth_internal.py
 
    DEFAULT_AUTH_TYPE = 'internal'
 

	
 
    user_id = Column(Integer(), primary_key=True)
 
    username = Column(String(255), nullable=False, unique=True)
 
    password = Column(String(255), nullable=False)
 
    active = Column(Boolean(), nullable=False, default=True)
 
    admin = Column(Boolean(), nullable=False, default=False)
 
    name = Column("firstname", Unicode(255), nullable=False)
 
    lastname = Column(Unicode(255), nullable=False)
 
    _email = Column("email", String(255), nullable=True, unique=True) # FIXME: not nullable?
 
    last_login = Column(DateTime(timezone=False), nullable=True)
 
    extern_type = Column(String(255), nullable=True) # FIXME: not nullable?
 
    extern_name = Column(String(255), nullable=True) # FIXME: not nullable?
 
    api_key = Column(String(255), nullable=False)
 
    created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    _user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data # FIXME: not nullable?
 

	
 
    user_log = relationship('UserLog')
 
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
 

	
 
    repositories = relationship('Repository')
 
    repo_groups = relationship('RepoGroup')
 
    user_groups = relationship('UserGroup')
 
    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
 
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
 

	
kallithea/model/repo.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.repo
 
~~~~~~~~~~~~~~~~~~~~
 

	
 
Repository model for kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jun 5, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 

	
 
"""
 

	
 
import logging
 
import os
 
import shutil
 
import traceback
 
from datetime import datetime
 

	
 
import kallithea.lib.utils2
 
from kallithea.lib import helpers as h
 
from kallithea.lib.auth import HasRepoPermissionLevel, HasUserGroupPermissionLevel
 
from kallithea.lib.caching_query import FromCache
 
from kallithea.lib.exceptions import AttachedForksError
 
from kallithea.lib.hooks import log_delete_repository
 
from kallithea.lib.utils import is_valid_repo_uri, make_ui
 
from kallithea.lib.utils2 import LazyProperty, get_current_authuser, obfuscate_url_pw, remove_prefix
 
from kallithea.lib.vcs.backends import get_backend
 
from kallithea.model.db import (
 
    Permission, RepoGroup, Repository, RepositoryField, Session, Statistics, Ui, User, UserGroup, UserGroupRepoGroupToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm, UserRepoToPerm)
 
from kallithea.model.db import (Permission, RepoGroup, Repository, RepositoryField, Session, Statistics, Ui, User, UserGroup, UserGroupRepoGroupToPerm,
 
                                UserGroupRepoToPerm, UserRepoGroupToPerm, UserRepoToPerm)
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class RepoModel(object):
 

	
 
    URL_SEPARATOR = Repository.url_sep()
 

	
 
    def _create_default_perms(self, repository, private):
 
        # create default permission
 
        default = 'repository.read'
 
        def_user = User.get_default_user()
 
        for p in def_user.user_perms:
 
            if p.permission.permission_name.startswith('repository.'):
 
                default = p.permission.permission_name
 
                break
 

	
 
        default_perm = 'repository.none' if private else default
 

	
 
        repo_to_perm = UserRepoToPerm()
 
        repo_to_perm.permission = Permission.get_by_key(default_perm)
 

	
 
        repo_to_perm.repository = repository
 
        repo_to_perm.user_id = def_user.user_id
 
        Session().add(repo_to_perm)
 

	
 
        return repo_to_perm
 

	
 
    @LazyProperty
 
    def repos_path(self):
 
        """
 
        Gets the repositories root path from database
 
        """
 

	
 
        q = Ui.query().filter(Ui.ui_key == '/').one()
 
        return q.ui_value
 

	
 
    def get(self, repo_id, cache=False):
 
        repo = Repository.query() \
 
            .filter(Repository.repo_id == repo_id)
 

	
 
        if cache:
 
            repo = repo.options(FromCache("sql_cache_short",
 
                                          "get_repo_%s" % repo_id))
 
        return repo.scalar()
 

	
 
    def get_repo(self, repository):
 
        return Repository.guess_instance(repository)
 

	
 
    def get_by_repo_name(self, repo_name, cache=False):
 
        repo = Repository.query() \
 
            .filter(Repository.repo_name == repo_name)
 

	
 
        if cache:
 
            repo = repo.options(FromCache("sql_cache_short",
 
                                          "get_repo_%s" % repo_name))
 
        return repo.scalar()
 

	
 
    def get_all_user_repos(self, user):
 
        """
 
        Gets all repositories to which the given user has at least read access
 

	
 
        :param user:
 
        """
 
        from kallithea.lib.auth import AuthUser
 
        auth_user = AuthUser(dbuser=User.guess_instance(user))
 
        repos = [repo_name
 
            for repo_name, perm in auth_user.permissions['repositories'].items()
 
            if perm in ['repository.read', 'repository.write', 'repository.admin']
 
            ]
 
        return Repository.query().filter(Repository.repo_name.in_(repos))
 

	
 
    @classmethod
 
    def _render_datatable(cls, tmpl, *args, **kwargs):
 
        from tg import tmpl_context as c, request, app_globals
 
        from tg.i18n import ugettext as _
 

	
 
        _tmpl_lookup = app_globals.mako_lookup
 
        template = _tmpl_lookup.get_template('data_table/_dt_elements.html')
 

	
 
        tmpl = template.get_def(tmpl)
 
        kwargs.update(dict(_=_, h=h, c=c, request=request))
 
        return tmpl.render_unicode(*args, **kwargs)
 

	
 
    def get_repos_as_dict(self, repos_list, repo_groups_list=None,
 
                          admin=False,
 
                          short_name=False):
 
        """Return repository list for use by DataTable.
 
        repos_list: list of repositories - but will be filtered for read permission.
 
        repo_groups_list: added at top of list without permission check.
 
        admin: return data for action column.
 
        """
 
        _render = self._render_datatable
 
        from tg import tmpl_context as c, request
 
        from kallithea.model.scm import ScmModel
 

	
 
        def repo_lnk(name, rtype, rstate, private, fork_of):
 
            return _render('repo_name', name, rtype, rstate, private, fork_of,
 
                           short_name=short_name)
 

	
 
        def following(repo_id, is_following):
 
            return _render('following', repo_id, is_following)
 

	
 
        def last_change(last_change):
 
            return _render("last_change", last_change)
 

	
 
        def rss_lnk(repo_name):
 
            return _render("rss", repo_name)
 

	
 
        def atom_lnk(repo_name):
 
            return _render("atom", repo_name)
 

	
 
        def last_rev(repo_name, cs_cache):
 
            return _render('revision', repo_name, cs_cache.get('revision'),
 
                           cs_cache.get('raw_id'), cs_cache.get('author'),
 
                           cs_cache.get('message'))
 

	
 
        def desc(desc):
 
            return h.urlify_text(desc, truncate=80, stylize=c.visual.stylify_metalabels)
 

	
 
        def state(repo_state):
 
            return _render("repo_state", repo_state)
 

	
 
        def repo_actions(repo_name):
 
            return _render('repo_actions', repo_name)
 

	
 
        def owner_actions(owner_id, username):
 
            return _render('user_name', owner_id, username)
 

	
 
        repos_data = []
 

	
 
        for gr in repo_groups_list or []:
 
            repos_data.append(dict(
 
                raw_name='\0' + gr.name, # sort before repositories
 
                just_name=gr.name,
 
                name=_render('group_name_html', group_name=gr.group_name, name=gr.name),
 
                desc=gr.group_description))
 

	
 
        for repo in repos_list:
 
            if not HasRepoPermissionLevel('read')(repo.repo_name, 'get_repos_as_dict check'):
 
                continue
 
            cs_cache = repo.changeset_cache
 
            row = {
 
                "raw_name": repo.repo_name,
 
                "just_name": repo.just_name,
 
                "name": repo_lnk(repo.repo_name, repo.repo_type,
 
                                 repo.repo_state, repo.private, repo.fork),
 
                "following": following(
 
                    repo.repo_id,
 
                    ScmModel().is_following_repo(repo.repo_name, request.authuser.user_id),
 
                ),
 
                "last_change_iso": repo.last_db_change.isoformat(),
 
                "last_change": last_change(repo.last_db_change),
 
                "last_changeset": last_rev(repo.repo_name, cs_cache),
 
                "last_rev_raw": cs_cache.get('revision'),
 
                "desc": desc(repo.description),
 
                "owner": h.person(repo.owner),
 
                "state": state(repo.repo_state),
 
                "rss": rss_lnk(repo.repo_name),
 
                "atom": atom_lnk(repo.repo_name),
 
            }
 
            if admin:
 
                row.update({
 
                    "action": repo_actions(repo.repo_name),
 
                    "owner": owner_actions(repo.owner_id,
 
                                           h.person(repo.owner))
 
                })
 
            repos_data.append(row)
 

	
 
        return {
 
            "sort": "name",
 
            "dir": "asc",
 
            "records": repos_data
 
        }
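    # Illustrative sketch (not part of this changeset) of the structure
    # returned above, as consumed by the DataTable frontend:
    #
    #     {"sort": "name", "dir": "asc",
    #      "records": [{"raw_name": ..., "name": ..., "owner": ..., ...}, ...]}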
 

	
 
    def _get_defaults(self, repo_name):
 
        """
 
        Gets information about repository, and returns a dict for
 
        usage in forms
 

	
 
        :param repo_name:
 
        """
 

	
 
        repo_info = Repository.get_by_repo_name(repo_name)
 

	
 
        if repo_info is None:
 
            return None
 

	
 
        defaults = repo_info.get_dict()
 
        defaults['repo_name'] = repo_info.just_name
 
        defaults['repo_group'] = repo_info.group_id
 

	
 
        for strip, k in [(0, 'repo_type'), (1, 'repo_enable_downloads'),
 
                         (1, 'repo_description'),
 
                         (1, 'repo_landing_rev'), (0, 'clone_uri'),
 
                         (1, 'repo_private'), (1, 'repo_enable_statistics')]:
 
            attr = k
 
            if strip:
 
                attr = remove_prefix(k, 'repo_')
 

	
 
            val = defaults[attr]
 
            if k == 'repo_landing_rev':
 
                val = ':'.join(defaults[attr])
 
            defaults[k] = val
 
            if k == 'clone_uri':
 
                defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
 

	
 
        # fill owner
 
        if repo_info.owner:
 
            defaults.update({'owner': repo_info.owner.username})
 
        else:
 
            replacement_user = User.query().filter(User.admin ==
 
                                                   True).first().username
 
            defaults.update({'owner': replacement_user})
 

	
 
        # fill repository users
 
        for p in repo_info.repo_to_perm:
 
            defaults.update({'u_perm_%s' % p.user.username:
 
                                 p.permission.permission_name})
 

	
 
        # fill repository groups
 
        for p in repo_info.users_group_to_perm:
 
            defaults.update({'g_perm_%s' % p.users_group.users_group_name:
 
                                 p.permission.permission_name})
 

	
 
        return defaults
 

	
 
    def update(self, repo, **kwargs):
 
        try:
 
            cur_repo = Repository.guess_instance(repo)
 
            org_repo_name = cur_repo.repo_name
 
            if 'owner' in kwargs:
 
                cur_repo.owner = User.get_by_username(kwargs['owner'])
 

	
 
            if 'repo_group' in kwargs:
 
                assert kwargs['repo_group'] != '-1', kwargs # RepoForm should have converted to None
 
                cur_repo.group = RepoGroup.get(kwargs['repo_group'])
 
                cur_repo.repo_name = cur_repo.get_new_name(cur_repo.just_name)
 
            log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
 
            for k in ['repo_enable_downloads',
 
                      'repo_description',
 
                      'repo_landing_rev',
 
                      'repo_private',
 
                      'repo_enable_statistics',
 
                      ]:
 
                if k in kwargs:
 
                    setattr(cur_repo, remove_prefix(k, 'repo_'), kwargs[k])
 
            clone_uri = kwargs.get('clone_uri')
 
            if clone_uri is not None and clone_uri != cur_repo.clone_uri_hidden:
 
                # clone_uri is modified - if given a value, check it is valid
 
                if clone_uri != '':
 
                    # will raise exception on error
 
                    is_valid_repo_uri(cur_repo.repo_type, clone_uri, make_ui())
 
                cur_repo.clone_uri = clone_uri
 

	
 
            if 'repo_name' in kwargs:
 
                repo_name = kwargs['repo_name']
 
                if kallithea.lib.utils2.repo_name_slug(repo_name) != repo_name:
 
                    raise Exception('invalid repo name %s' % repo_name)
 
                cur_repo.repo_name = cur_repo.get_new_name(repo_name)
 

	
 
            # if private flag is set, reset default permission to NONE
 
            if kwargs.get('repo_private'):
 
                EMPTY_PERM = 'repository.none'
 
                RepoModel().grant_user_permission(
 
                    repo=cur_repo, user='default', perm=EMPTY_PERM
 
                )
 
                # handle extra fields
 
            for field in [k for k in kwargs if k.startswith(RepositoryField.PREFIX)]:
 
                k = RepositoryField.un_prefix_key(field)
 
                ex_field = RepositoryField.get_by_key_name(key=k, repo=cur_repo)
 
                if ex_field:
 
                    ex_field.field_value = kwargs[field]
 

	
 
            if org_repo_name != cur_repo.repo_name:
 
                # rename repository
 
                self._rename_filesystem_repo(old=org_repo_name, new=cur_repo.repo_name)
 

	
 
            return cur_repo
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def _create_repo(self, repo_name, repo_type, description, owner,
 
                     private=False, clone_uri=None, repo_group=None,
 
                     landing_rev='rev:tip', fork_of=None,
 
                     copy_fork_permissions=False, enable_statistics=False,
 
                     enable_downloads=False,
 
                     copy_group_permissions=False, state=Repository.STATE_PENDING):
 
        """
 
        Create repository inside database with PENDING state. This should only be
 
        executed by create(), with the exception of importing existing repos.
 

	
 
        """
 
        from kallithea.model.scm import ScmModel
 

	
 
        owner = User.guess_instance(owner)
 
        fork_of = Repository.guess_instance(fork_of)
 
        repo_group = RepoGroup.guess_instance(repo_group)
 
        try:
 
            repo_name = repo_name
 
            description = description
 
            # repo name is just a name of repository
 
            # while repo_name_full is a full qualified name that is combined
 
            # with name and path of group
 
            repo_name_full = repo_name
 
            repo_name = repo_name.split(self.URL_SEPARATOR)[-1]
 
            if kallithea.lib.utils2.repo_name_slug(repo_name) != repo_name:
 
                raise Exception('invalid repo name %s' % repo_name)
 

	
 
            new_repo = Repository()
 
            new_repo.repo_state = state
 
            new_repo.enable_statistics = False
 
            new_repo.repo_name = repo_name_full
 
            new_repo.repo_type = repo_type
 
            new_repo.owner = owner
 
            new_repo.group = repo_group
 
            new_repo.description = description or repo_name
 
            new_repo.private = private
 
            if clone_uri:
 
                # will raise exception on error
 
                is_valid_repo_uri(repo_type, clone_uri, make_ui())
 
            new_repo.clone_uri = clone_uri
 
            new_repo.landing_rev = landing_rev
 

	
 
            new_repo.enable_statistics = enable_statistics
 
            new_repo.enable_downloads = enable_downloads
 

	
 
            if fork_of:
 
                parent_repo = fork_of
 
                new_repo.fork = parent_repo
 

	
 
            Session().add(new_repo)
 

	
 
            if fork_of and copy_fork_permissions:
 
                repo = fork_of
 
                user_perms = UserRepoToPerm.query() \
 
                    .filter(UserRepoToPerm.repository == repo).all()
 
                group_perms = UserGroupRepoToPerm.query() \
 
                    .filter(UserGroupRepoToPerm.repository == repo).all()
 

	
 
                for perm in user_perms:
 
                    UserRepoToPerm.create(perm.user, new_repo, perm.permission)
 

	
 
                for perm in group_perms:
 
                    UserGroupRepoToPerm.create(perm.users_group, new_repo,
 
                                               perm.permission)
 

	
 
            elif repo_group and copy_group_permissions:
 

	
 
                user_perms = UserRepoGroupToPerm.query() \
 
                    .filter(UserRepoGroupToPerm.group == repo_group).all()
 

	
 
                group_perms = UserGroupRepoGroupToPerm.query() \
 
                    .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
 

	
 
                for perm in user_perms:
 
                    perm_name = perm.permission.permission_name.replace('group.', 'repository.')
 
                    perm_obj = Permission.get_by_key(perm_name)
 
                    UserRepoToPerm.create(perm.user, new_repo, perm_obj)
 

	
 
                for perm in group_perms:
 
                    perm_name = perm.permission.permission_name.replace('group.', 'repository.')
 
                    perm_obj = Permission.get_by_key(perm_name)
 
                    UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
 

	
 
            else:
 
                self._create_default_perms(new_repo, private)
 

	
 
            # now automatically start following this repository as owner
 
            ScmModel().toggle_following_repo(new_repo.repo_id, owner.user_id)
 
            # we need to flush here, in order to check if database won't
 
            # throw any exceptions, create filesystem dirs at the very end
 
            Session().flush()
 
            return new_repo
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def create(self, form_data, cur_user):
 
        """
 
        Create repository using celery tasks
 

	
kallithea/model/user_group.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.user_group
 
~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
user group model for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Oct 1, 2011
 
:author: nvinot, marcink
 
"""
 

	
 

	
 
import logging
 
import traceback
 

	
 
from kallithea.lib.exceptions import RepoGroupAssignmentError, UserGroupsAssignedException
 
from kallithea.model.db import (
 
    Permission, Session, User, UserGroup, UserGroupMember, UserGroupRepoToPerm, UserGroupToPerm, UserGroupUserGroupToPerm, UserUserGroupToPerm)
 
from kallithea.model.db import (Permission, Session, User, UserGroup, UserGroupMember, UserGroupRepoToPerm, UserGroupToPerm, UserGroupUserGroupToPerm,
 
                                UserUserGroupToPerm)
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class UserGroupModel(object):
 

	
 
    def _create_default_perms(self, user_group):
 
        # create default permission
 
        default_perm = 'usergroup.read'
 
        def_user = User.get_default_user()
 
        for p in def_user.user_perms:
 
            if p.permission.permission_name.startswith('usergroup.'):
 
                default_perm = p.permission.permission_name
 
                break
 

	
 
        user_group_to_perm = UserUserGroupToPerm()
 
        user_group_to_perm.permission = Permission.get_by_key(default_perm)
 

	
 
        user_group_to_perm.user_group = user_group
 
        user_group_to_perm.user_id = def_user.user_id
 
        Session().add(user_group_to_perm)
 
        return user_group_to_perm
 

	
 
    def _update_permissions(self, user_group, perms_new=None,
 
                            perms_updates=None):
 
        from kallithea.lib.auth import HasUserGroupPermissionLevel
 
        if not perms_new:
 
            perms_new = []
 
        if not perms_updates:
 
            perms_updates = []
 

	
 
        # update permissions
 
        for member, perm, member_type in perms_updates:
 
            if member_type == 'user':
 
                # this updates existing one
 
                self.grant_user_permission(
 
                    user_group=user_group, user=member, perm=perm
 
                )
 
            else:
 
                # check if we have permissions to alter this usergroup's access
 
                if HasUserGroupPermissionLevel('read')(member):
 
                    self.grant_user_group_permission(
 
                        target_user_group=user_group, user_group=member, perm=perm
 
                    )
 
        # set new permissions
 
        for member, perm, member_type in perms_new:
 
            if member_type == 'user':
 
                self.grant_user_permission(
 
                    user_group=user_group, user=member, perm=perm
 
                )
 
            else:
 
                # check if we have permissions to alter this usergroup's access
 
                if HasUserGroupPermissionLevel('read')(member):
 
                    self.grant_user_group_permission(
 
                        target_user_group=user_group, user_group=member, perm=perm
 
                    )
 

	
 
    def get(self, user_group_id):
 
        return UserGroup.get(user_group_id)
 

	
 
    def get_group(self, user_group):
 
        return UserGroup.guess_instance(user_group)
 

	
 
    def get_by_name(self, name, cache=False, case_insensitive=False):
 
        return UserGroup.get_by_group_name(name, cache=cache, case_insensitive=case_insensitive)
 

	
 
    def create(self, name, description, owner, active=True, group_data=None):
 
        try:
 
            new_user_group = UserGroup()
 
            new_user_group.owner = User.guess_instance(owner)
 
            new_user_group.users_group_name = name
 
            new_user_group.user_group_description = description
 
            new_user_group.users_group_active = active
 
            if group_data:
 
                new_user_group.group_data = group_data
 
            Session().add(new_user_group)
 
            self._create_default_perms(new_user_group)
 

	
 
            self.grant_user_permission(user_group=new_user_group,
 
                                       user=owner, perm='usergroup.admin')
 

	
 
            return new_user_group
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def update(self, user_group, form_data):
 

	
 
        try:
 
            user_group = UserGroup.guess_instance(user_group)
 

	
 
            for k, v in form_data.items():
 
                if k == 'users_group_members':
 
                    members_list = []
 
                    if v:
 
                        v = [v] if isinstance(v, str) else v
 
                        for u_id in set(v):
 
                            member = UserGroupMember(user_group.users_group_id, u_id)
 
                            members_list.append(member)
 
                            Session().add(member)
 
                    user_group.members = members_list
 
                setattr(user_group, k, v)
 

	
 
            # Flush to make db assign users_group_member_id to newly
 
            # created UserGroupMembers.
 
            Session().flush()
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def delete(self, user_group, force=False):
 
        """
 
        Deletes the user group. Unless the force flag is used, an exception is
 
        raised if the group is still assigned to any repository; otherwise the
 
        group is deleted.
 

	
 
        :param user_group:
 
        :param force:
 
        """
 
        user_group = UserGroup.guess_instance(user_group)
 
        try:
 
            # check if this group is not assigned to repo
 
            assigned_groups = UserGroupRepoToPerm.query() \
 
                .filter(UserGroupRepoToPerm.users_group == user_group).all()
 
            assigned_groups = [x.repository.repo_name for x in assigned_groups]
 

	
 
            if assigned_groups and not force:
 
                raise UserGroupsAssignedException(
 
                    'User Group assigned to %s' % ", ".join(assigned_groups))
 
            Session().delete(user_group)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def add_user_to_group(self, user_group, user):
 
        """Return True if user already is in the group - else return the new UserGroupMember"""
 
        user_group = UserGroup.guess_instance(user_group)
 
        user = User.guess_instance(user)
 

	
 
        for m in user_group.members:
 
            u = m.user
 
            if u.user_id == user.user_id:
 
                # user already in the group, skip
 
                return True
 

	
 
        try:
 
            user_group_member = UserGroupMember()
 
            user_group_member.user = user
 
            user_group_member.users_group = user_group
 

	
 
            user_group.members.append(user_group_member)
 
            user.group_member.append(user_group_member)
 

	
 
            Session().add(user_group_member)
 
            return user_group_member
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def remove_user_from_group(self, user_group, user):
 
        user_group = UserGroup.guess_instance(user_group)
 
        user = User.guess_instance(user)
 

	
 
        user_group_member = None
 
        for m in user_group.members:
 
            if m.user_id == user.user_id:
 
                # Found this user's membership row
 
                user_group_member = m
 
                break
 

	
 
        if user_group_member:
 
            try:
 
                Session().delete(user_group_member)
 
                return True
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                raise
 
        else:
 
            # User isn't in that group
 
            return False
 

	
 
    def has_perm(self, user_group, perm):
 
        user_group = UserGroup.guess_instance(user_group)
 
        perm = Permission.guess_instance(perm)
 

	
 
        return UserGroupToPerm.query() \
 
            .filter(UserGroupToPerm.users_group == user_group) \
 
            .filter(UserGroupToPerm.permission == perm).scalar() is not None
 

	
 
    def grant_perm(self, user_group, perm):
 
        user_group = UserGroup.guess_instance(user_group)
 
        perm = Permission.guess_instance(perm)
 

	
 
        # if this permission is already granted skip it
 
        _perm = UserGroupToPerm.query() \
 
            .filter(UserGroupToPerm.users_group == user_group) \
 
            .filter(UserGroupToPerm.permission == perm) \
 
            .scalar()
 
        if _perm:
 
            return
 

	
 
        new = UserGroupToPerm()
 
        new.users_group = user_group
 
        new.permission = perm
 
        Session().add(new)
 
        return new
 

	
 
    def revoke_perm(self, user_group, perm):
 
        user_group = UserGroup.guess_instance(user_group)
 
        perm = Permission.guess_instance(perm)
 

	
 
        obj = UserGroupToPerm.query() \
 
            .filter(UserGroupToPerm.users_group == user_group) \
 
            .filter(UserGroupToPerm.permission == perm).scalar()
 
        if obj is not None:
 
            Session().delete(obj)
 

	
 
    def grant_user_permission(self, user_group, user, perm):
 
        """
 
        Grant permission for user on given user group, or update
 
        existing one if found
 

	
 
        :param user_group: Instance of UserGroup, users_group_id,
 
            or users_group_name
 
        :param user: Instance of User, user_id or username
 
        :param perm: Instance of Permission, or permission_name
 
        """
 

	
 
        user_group = UserGroup.guess_instance(user_group)
 
        user = User.guess_instance(user)
 
        permission = Permission.guess_instance(perm)
 

	
 
        # check if we have that permission already
 
        obj = UserUserGroupToPerm.query() \
 
            .filter(UserUserGroupToPerm.user == user) \
 
            .filter(UserUserGroupToPerm.user_group == user_group) \
 
            .scalar()
 
        if obj is None:
 
            # create new !
 
            obj = UserUserGroupToPerm()
 
            Session().add(obj)
 
        obj.user_group = user_group
 
        obj.user = user
 
        obj.permission = permission
 
        log.debug('Granted perm %s to %s on %s', perm, user, user_group)
 
        return obj
 

	
 
    def revoke_user_permission(self, user_group, user):
 
        """
 
        Revoke permission for user on given user group
 

	
 
        :param user_group: Instance of UserGroup, users_group_id,
 
            or users_group_name
 
        :param user: Instance of User, user_id or username
 
        """
 

	
 
        user_group = UserGroup.guess_instance(user_group)
 
        user = User.guess_instance(user)
 

	
 
        obj = UserUserGroupToPerm.query() \
 
            .filter(UserUserGroupToPerm.user == user) \
 
            .filter(UserUserGroupToPerm.user_group == user_group) \
 
            .scalar()
 
        if obj is not None:
 
            Session().delete(obj)
 
            log.debug('Revoked perm on %s on %s', user_group, user)
 

	
 
    def grant_user_group_permission(self, target_user_group, user_group, perm):
 
        """
 
        Grant user group permission for given target_user_group
 

	
 
        :param target_user_group:
 
        :param user_group:
 
        :param perm:
 
        """
 
        target_user_group = UserGroup.guess_instance(target_user_group)
 
        user_group = UserGroup.guess_instance(user_group)
 
        permission = Permission.guess_instance(perm)
 
        # forbid assigning same user group to itself
 
        if target_user_group == user_group:
 
            raise RepoGroupAssignmentError('target user group: %s cannot be '
 
                                           'assigned to itself' % target_user_group)
 

	
 
        # check if we have that permission already
 
        obj = UserGroupUserGroupToPerm.query() \
 
            .filter(UserGroupUserGroupToPerm.target_user_group == target_user_group) \
 
            .filter(UserGroupUserGroupToPerm.user_group == user_group) \
 
            .scalar()
 
        if obj is None:
 
            # create new !
 
            obj = UserGroupUserGroupToPerm()
 
            Session().add(obj)
 
        obj.user_group = user_group
 
        obj.target_user_group = target_user_group
 
        obj.permission = permission
 
        log.debug('Granted perm %s to %s on %s', perm, target_user_group, user_group)
 
        return obj
 

	
 
    def revoke_user_group_permission(self, target_user_group, user_group):
 
        """
 
        Revoke user group permission for given target_user_group
 

	
 
        :param target_user_group:
 
        :param user_group:
 
        """
 
        target_user_group = UserGroup.guess_instance(target_user_group)
 
        user_group = UserGroup.guess_instance(user_group)
 

	
 
        obj = UserGroupUserGroupToPerm.query() \
 
            .filter(UserGroupUserGroupToPerm.target_user_group == target_user_group) \
 
            .filter(UserGroupUserGroupToPerm.user_group == user_group) \
 
            .scalar()
 
        if obj is not None:
 
            Session().delete(obj)
 
            log.debug('Revoked perm on %s on %s', target_user_group, user_group)
 

	
 
    def enforce_groups(self, user, groups, extern_type=None):
 
        user = User.guess_instance(user)
 
        log.debug('Enforcing groups %s on user %s', user, groups)
 
        current_groups = user.group_member
 
        # find the external created groups
 
        externals = [x.users_group for x in current_groups
 
                     if 'extern_type' in x.users_group.group_data]
 

	
 
        # determine which groups the user should be removed from:
 
        # external groups that are no longer listed in the given groups
 
        for gr in externals:
 
            if gr.users_group_name not in groups:
 
                log.debug('Removing user %s from user group %s', user, gr)
 
                self.remove_user_from_group(gr, user)
 

	
 
        # now make sure the user is a member of each group given in the groups parameter
 
        owner = User.get_first_admin().username
 
        for gr in set(groups):
 
            existing_group = UserGroup.get_by_group_name(gr)
 
            if not existing_group:
 
                desc = 'Automatically created from plugin:%s' % extern_type
 
                # we use first admin account to set the owner of the group
 
                existing_group = UserGroupModel().create(gr, desc, owner,
 
                                        group_data={'extern_type': extern_type})
 

	
 
            # we can only add users to special groups created via plugins
 
            managed = 'extern_type' in existing_group.group_data
 
            if managed:
 
                log.debug('Adding user %s to user group %s', user, gr)
 
                UserGroupModel().add_user_to_group(existing_group, user)
 
            else:
 
                log.debug('Skipping addition to group %s since it is '
 
                          'not managed by auth plugins', gr)
kallithea/tests/fixture.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
"""
 
Helpers for fixture generation
 
"""
 

	
 
import logging
 
import os
 
import shutil
 
import tarfile
 
from os.path import dirname
 

	
 
import mock
 
from tg import request
 
from tg.util.webtest import test_context
 

	
 
from kallithea.lib import helpers
 
from kallithea.lib.auth import AuthUser
 
from kallithea.lib.db_manage import DbManage
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 
from kallithea.model.changeset_status import ChangesetStatusModel
 
from kallithea.model.comment import ChangesetCommentsModel
 
from kallithea.model.db import ChangesetStatus, Gist, RepoGroup, Repository, User, UserGroup
 
from kallithea.model.gist import GistModel
 
from kallithea.model.meta import Session
 
from kallithea.model.pull_request import CreatePullRequestAction  # , CreatePullRequestIterationAction, PullRequestModel
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.repo_group import RepoGroupModel
 
from kallithea.model.scm import ScmModel
 
from kallithea.model.user import UserModel
 
from kallithea.model.user_group import UserGroupModel
 
from kallithea.tests.base import (
 
    GIT_REPO, HG_REPO, IP_ADDR, TEST_USER_ADMIN_EMAIL, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TESTS_TMP_PATH, invalidate_all_caches)
 
from kallithea.tests.base import (GIT_REPO, HG_REPO, IP_ADDR, TEST_USER_ADMIN_EMAIL, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TESTS_TMP_PATH,
 
                                  invalidate_all_caches)
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 
FIXTURES = os.path.join(dirname(dirname(os.path.abspath(__file__))), 'tests', 'fixtures')
 

	
 

	
 
def error_function(*args, **kwargs):
 
    raise Exception('Total Crash !')
 

	
 

	
 
class Fixture(object):
 

	
 
    def __init__(self):
 
        pass
 

	
 
    def anon_access(self, status):
 
        """
 
        Context manager for controlling anonymous access.
 
        Anon access will be set and committed, but restored again when exiting the block.
 

	
 
        Usage:
 

	
 
        fixture = Fixture()
 
        with fixture.anon_access(False):
 
            stuff
 
        """
 

	
 
        class context(object):
 
            def __enter__(self):
 
                anon = User.get_default_user()
 
                self._before = anon.active
 
                anon.active = status
 
                Session().commit()
 
                invalidate_all_caches()
 

	
 
            def __exit__(self, exc_type, exc_val, exc_tb):
 
                anon = User.get_default_user()
 
                anon.active = self._before
 
                Session().commit()
 

	
 
        return context()
 

	
 
    def _get_repo_create_params(self, **custom):
 
        """Return form values to be validated through RepoForm"""
 
        defs = dict(
 
            repo_name=None,
 
            repo_type='hg',
 
            clone_uri='',
 
            repo_group='-1',
 
            repo_description='DESC',
 
            repo_private=False,
 
            repo_landing_rev='rev:tip',
 
            repo_copy_permissions=False,
 
            repo_state=Repository.STATE_CREATED,
 
        )
 
        defs.update(custom)
 
        if 'repo_name_full' not in custom:
 
            defs.update({'repo_name_full': defs['repo_name']})
 

	
 
        # fix the repo name if passed as repo_name_full
 
        if defs['repo_name']:
 
            defs['repo_name'] = defs['repo_name'].split('/')[-1]
 

	
 
        return defs
 

	
 
    def _get_repo_group_create_params(self, **custom):
 
        """Return form values to be validated through RepoGroupForm"""
 
        defs = dict(
 
            group_name=None,
 
            group_description='DESC',
 
            parent_group_id='-1',
 
            perms_updates=[],
 
            perms_new=[],
 
            recursive=False
 
        )
 
        defs.update(custom)
 

	
 
        return defs
 

	
 
    def _get_user_create_params(self, name, **custom):
 
        defs = dict(
 
            username=name,
 
            password='qweqwe',
 
            email='%s+test@example.com' % name,
 
            firstname='TestUser',
 
            lastname='Test',
 
            active=True,
 
            admin=False,
 
            extern_type='internal',
 
            extern_name=None
 
        )
 
        defs.update(custom)
 

	
 
        return defs
 

	
 
    def _get_user_group_create_params(self, name, **custom):
 
        defs = dict(
 
            users_group_name=name,
 
            user_group_description='DESC',
 
            users_group_active=True,
 
            user_group_data={},
 
        )
 
        defs.update(custom)
 

	
 
        return defs
 

	
 
    def create_repo(self, name, repo_group=None, **kwargs):
 
        if 'skip_if_exists' in kwargs:
 
            del kwargs['skip_if_exists']
 
            r = Repository.get_by_repo_name(name)
 
            if r:
 
                return r
 

	
 
        if isinstance(repo_group, RepoGroup):
 
            repo_group = repo_group.group_id
 

	
 
        form_data = self._get_repo_create_params(repo_name=name, **kwargs)
 
        form_data['repo_group'] = repo_group # patch form dict so it can be used directly by model
 
        cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
 
        RepoModel().create(form_data, cur_user)
 
        Session().commit()
 
        ScmModel().mark_for_invalidation(name)
 
        return Repository.get_by_repo_name(name)
 

	
 
    def create_fork(self, repo_to_fork, fork_name, **kwargs):
 
        repo_to_fork = Repository.get_by_repo_name(repo_to_fork)
 

	
 
        form_data = self._get_repo_create_params(repo_name=fork_name,
 
                                            fork_parent_id=repo_to_fork,
 
                                            repo_type=repo_to_fork.repo_type,
 
                                            **kwargs)
 
        # patch form dict so it can be used directly by model
 
        form_data['description'] = form_data['repo_description']
 
        form_data['private'] = form_data['repo_private']
 
        form_data['landing_rev'] = form_data['repo_landing_rev']
 

	
 
        owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
 
        RepoModel().create_fork(form_data, cur_user=owner)
 
        Session().commit()
 
        ScmModel().mark_for_invalidation(fork_name)
 
        r = Repository.get_by_repo_name(fork_name)
 
        assert r
 
        return r
 

	
 
    def destroy_repo(self, repo_name, **kwargs):
 
        RepoModel().delete(repo_name, **kwargs)
 
        Session().commit()
 

	
 
    def create_repo_group(self, name, parent_group_id=None, **kwargs):
 
        assert '/' not in name, (name, kwargs) # use parent_group_id to make nested groups
 
        if 'skip_if_exists' in kwargs:
 
            del kwargs['skip_if_exists']
 
            gr = RepoGroup.get_by_group_name(group_name=name)
 
            if gr:
 
                return gr
 
        form_data = self._get_repo_group_create_params(group_name=name, **kwargs)
 
        gr = RepoGroupModel().create(
 
            group_name=form_data['group_name'],
 
            group_description=form_data['group_name'],
 
            parent=parent_group_id,
 
            owner=kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN),
 
            )
 
        Session().commit()
 
        gr = RepoGroup.get_by_group_name(gr.group_name)
 
        return gr
 

	
 
    def destroy_repo_group(self, repogroupid):
 
        RepoGroupModel().delete(repogroupid)
 
        Session().commit()
 

	
 
    def create_user(self, name, **kwargs):
 
        if 'skip_if_exists' in kwargs:
 
            del kwargs['skip_if_exists']
 
            user = User.get_by_username(name)
 
            if user:
 
                return user
 
        form_data = self._get_user_create_params(name, **kwargs)
 
        user = UserModel().create(form_data)
 
        Session().commit()
 
        user = User.get_by_username(user.username)
 
        return user
 

	
 
    def destroy_user(self, userid):
 
        UserModel().delete(userid)
 
        Session().commit()
 

	
 
    def create_user_group(self, name, **kwargs):
 
        if 'skip_if_exists' in kwargs:
 
            del kwargs['skip_if_exists']
 
            gr = UserGroup.get_by_group_name(group_name=name)
 
            if gr:
 
                return gr
 
        form_data = self._get_user_group_create_params(name, **kwargs)
 
        owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
 
        user_group = UserGroupModel().create(
 
            name=form_data['users_group_name'],
 
            description=form_data['user_group_description'],
 
            owner=owner, active=form_data['users_group_active'],
 
            group_data=form_data['user_group_data'])
 
        Session().commit()
 
        user_group = UserGroup.get_by_group_name(user_group.users_group_name)
 
        return user_group
 

	
 
    def destroy_user_group(self, usergroupid):
 
        UserGroupModel().delete(user_group=usergroupid, force=True)
 
        Session().commit()
 

	
 
    def create_gist(self, **kwargs):
 
        form_data = {
 
            'description': 'new-gist',
 
            'owner': TEST_USER_ADMIN_LOGIN,
 
            'gist_type': Gist.GIST_PUBLIC,
 
            'lifetime': -1,
 
            'gist_mapping': {'filename1.txt': {'content': 'hello world'}}
 
        }
 
        form_data.update(kwargs)
 
        gist = GistModel().create(
 
            description=form_data['description'], owner=form_data['owner'], ip_addr=IP_ADDR,
 
            gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'],
 
            lifetime=form_data['lifetime']
 
        )
 
        Session().commit()
 

	
 
        return gist
 

	
 
    def destroy_gists(self, gistid=None):
 
        for g in Gist.query():
 
            if gistid:
 
                if gistid == g.gist_access_id:
 
                    GistModel().delete(g)
 
            else:
 
                GistModel().delete(g)
 
        Session().commit()
 

	
 
    def load_resource(self, resource_name, strip=True):
 
        with open(os.path.join(FIXTURES, resource_name), 'rb') as f:
 
            source = f.read()
 
            if strip:
 
                source = source.strip()
 

	
 
        return source
 

	
 
    def commit_change(self, repo, filename, content, message, vcs_type,
 
                      parent=None, newfile=False, author=None):
 
        repo = Repository.get_by_repo_name(repo)
 
        _cs = parent
 
        if parent is None:
 
            _cs = EmptyChangeset(alias=vcs_type)
 
        if author is None:
 
            author = '%s <%s>' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_EMAIL)
 

	
 
        if newfile:
 
            nodes = {
 
                filename: {
 
                    'content': content
 
                }
 
            }
 
            cs = ScmModel().create_nodes(
 
                user=TEST_USER_ADMIN_LOGIN,
 
                ip_addr=IP_ADDR,
 
                repo=repo,
 
                message=message,
 
                nodes=nodes,
 
                parent_cs=_cs,
 
                author=author,
 
            )
 
        else:
 
            cs = ScmModel().commit_change(
 
                repo=repo.scm_instance, repo_name=repo.repo_name,
 
                cs=parent,
 
                user=TEST_USER_ADMIN_LOGIN,
 
                ip_addr=IP_ADDR,
 
                author=author,
 
                message=message,
 
                content=content,
 
                f_path=filename
 
            )
 
        return cs
 

	
 
    def review_changeset(self, repo, revision, status, author=TEST_USER_ADMIN_LOGIN):
 
        comment = ChangesetCommentsModel().create("review comment", repo, author, revision=revision, send_email=False)
 
        csm = ChangesetStatusModel().set_status(repo, ChangesetStatus.STATUS_APPROVED, author, comment, revision=revision)
 
        Session().commit()
 
        return csm
 

	
 
    def create_pullrequest(self, testcontroller, repo_name, pr_src_rev, pr_dst_rev, title='title'):
 
        org_ref = 'branch:stable:%s' % pr_src_rev
 
        other_ref = 'branch:default:%s' % pr_dst_rev
 
        with test_context(testcontroller.app): # needed to be able to mock request user
 
            org_repo = other_repo = Repository.get_by_repo_name(repo_name)
 
            owner_user = User.get_by_username(TEST_USER_ADMIN_LOGIN)
 
            reviewers = [User.get_by_username(TEST_USER_REGULAR_LOGIN)]
 
            request.authuser = AuthUser(dbuser=owner_user)
 
            # creating a PR sends a message with an absolute URL - generating that URL outside real request routing requires mocking url()
 
            with mock.patch.object(helpers, 'url', (lambda arg, qualified=False, **kwargs: ('https://localhost' if qualified else '') + '/fake/' + arg)):
 
                cmd = CreatePullRequestAction(org_repo, other_repo, org_ref, other_ref, title, 'No description', owner_user, reviewers)
 
                pull_request = cmd.execute()
 
            Session().commit()
 
        return pull_request.pull_request_id
 

	
 

	
 
#==============================================================================
 
# Global test environment setup
 
#==============================================================================
 

	
 
def create_test_env(repos_test_path, config):
 
    """
 
    Makes a fresh test database and
 
    installs the test repositories into the tmp dir
 
    """
 

	
 
    # PART ONE create db
 
    dbconf = config['sqlalchemy.url']
 
    log.debug('making test db %s', dbconf)
 

	
 
    # create test dir if it doesn't exist
 
    if not os.path.isdir(repos_test_path):
 
        log.debug('Creating testdir %s', repos_test_path)
 
        os.makedirs(repos_test_path)
 

	
 
    dbmanage = DbManage(dbconf=dbconf, root=config['here'],
 
                        tests=True)
 
    dbmanage.create_tables(override=True)
 
    # for tests dynamically set new root paths based on generated content
 
    dbmanage.create_settings(dbmanage.prompt_repo_root_path(repos_test_path))
 
    dbmanage.create_default_user()
 
    dbmanage.admin_prompt()
 
    dbmanage.create_permissions()
 
    dbmanage.populate_default_permissions()
 
    Session().commit()
 
    # PART TWO make test repo
 
    log.debug('making test vcs repositories')
 

	
 
    idx_path = config['index_dir']
 
    data_path = config['cache_dir']
 

	
 
    # clean index and data
 
    if idx_path and os.path.exists(idx_path):
 
        log.debug('remove %s', idx_path)
 
        shutil.rmtree(idx_path)
 

	
 
    if data_path and os.path.exists(data_path):
 
        log.debug('remove %s', data_path)
 
        shutil.rmtree(data_path)
 

	
 
    # CREATE DEFAULT TEST REPOS
 
    tar = tarfile.open(os.path.join(FIXTURES, 'vcs_test_hg.tar.gz'))
 
    tar.extractall(os.path.join(TESTS_TMP_PATH, HG_REPO))
 
    tar.close()
 

	
 
    tar = tarfile.open(os.path.join(FIXTURES, 'vcs_test_git.tar.gz'))
 
    tar.extractall(os.path.join(TESTS_TMP_PATH, GIT_REPO))
 
    tar.close()
 

	
 
    # LOAD VCS test stuff
 
    from kallithea.tests.vcs import setup_package
 
    setup_package()
 

	
 

	
 
def create_test_index(repo_location, config, full_index):
 
    """
 
    Makes default test index
 
    """
 

	
 
    from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
 
    from kallithea.lib.pidlock import DaemonLock
 

	
 
    index_location = os.path.join(config['index_dir'])
 
    if not os.path.exists(index_location):
 
        os.makedirs(index_location)
 

	
 
    l = DaemonLock(os.path.join(index_location, 'make_index.lock'))
 
    WhooshIndexingDaemon(index_location=index_location,
 
                         repo_location=repo_location) \
 
        .run(full_index=full_index)
 
    l.release()
 

	
 

	
 
def failing_test_hook(ui, repo, **kwargs):
 
    ui.write(b"failing_test_hook failed\n")
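 
    # a truthy return value makes Mercurial treat the hook as failed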
 
    return 1
 

	
 

	
kallithea/tests/vcs/conf.py
Show inline comments
 
"""
 
Unit tests configuration module for vcs.
 
"""
 
import os
 
import uuid
 

	
 
# Retrieve the necessary configuration options from the test base
 
# module. Some of these configuration options are subsequently
 
# consumed by the VCS test module.
 
from kallithea.tests.base import (
 
    GIT_REMOTE_REPO, HG_REMOTE_REPO, TEST_GIT_REPO, TEST_GIT_REPO_CLONE, TEST_HG_REPO, TEST_HG_REPO_CLONE, TEST_HG_REPO_PULL, TESTS_TMP_PATH)
 
from kallithea.tests.base import (GIT_REMOTE_REPO, HG_REMOTE_REPO, TEST_GIT_REPO, TEST_GIT_REPO_CLONE, TEST_HG_REPO, TEST_HG_REPO_CLONE, TEST_HG_REPO_PULL,
 
                                  TESTS_TMP_PATH)
 

	
 

	
 
__all__ = (
 
    'TEST_HG_REPO', 'TEST_GIT_REPO', 'HG_REMOTE_REPO', 'GIT_REMOTE_REPO',
 
    'TEST_HG_REPO_CLONE', 'TEST_GIT_REPO_CLONE', 'TEST_HG_REPO_PULL',
 
)
 

	
 

	
 
def get_new_dir(title=None):
 
    """
 
    Calculates a path for a new, non-existent, unique sub-directory in TESTS_TMP_PATH.
 

	
 
    Resulting directory name will have format:
 

	
 
    vcs-test-[title-]hexuuid
 

	
 
    The "hexuuid" is a hexadecimal value of a randomly generated
 
    UUID. Title will be added if specified.
 

	
 
    Args:
 
        title: Custom title to include as part of the resulting sub-directory
 
            name. Can be useful for debugging to identify destination. Defaults
 
            to None.
 

	
 
    Returns:
 
        Path to the new directory as a string.
 
    """
 

	
 
    test_repo_prefix = 'vcs-test'
 

	
 
    if title:
 
        name = "%s-%s" % (test_repo_prefix, title)
 
    else:
 
        name = test_repo_prefix
 

	
 
    path = os.path.join(TESTS_TMP_PATH, name)
 

	
 
    # Generate new hexes until we get a unique name (just in case).
 
    hex_uuid = uuid.uuid4().hex
 
    while os.path.exists("%s-%s" % (path, hex_uuid)):
 
        hex_uuid = uuid.uuid4().hex
 

	
 
    return "%s-%s" % (path, hex_uuid)
kallithea/tests/vcs/test_inmemchangesets.py
Show inline comments
 
# encoding: utf-8
 
"""
 
Tests so called "in memory changesets" commit API of vcs.
 
"""
 

	
 
import datetime
 

	
 
import pytest
 

	
 
from kallithea.lib.vcs.exceptions import (
 
    EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError, NodeDoesNotExistError, NodeNotChangedError)
 
from kallithea.lib.vcs.exceptions import (EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
 
                                          NodeDoesNotExistError, NodeNotChangedError)
 
from kallithea.lib.vcs.nodes import DirNode, FileNode
 
from kallithea.tests.vcs.base import _BackendTestMixin
 

	
 

	
 
class InMemoryChangesetTestMixin(_BackendTestMixin):
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        # Note: this is slightly different from the regular _get_commits method,
 
        # as we don't actually return any commits. The creation of commits is
 
        # handled in the tests themselves.
 
        cls.nodes = [
 
            FileNode('foobar', content='Foo & bar'),
 
            FileNode('foobar2', content='Foo & bar, doubled!'),
 
            FileNode('foo bar with spaces', content=''),
 
            FileNode('foo/bar/baz', content='Inside'),
 
            FileNode('foo/bar/file.bin', content='\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00;\x00\x03\x00\xfe\xff\t\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x18\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'),
 
        ]
 
        commits = []
 
        return commits
 

	
 
    def test_add(self):
 
        rev_count = len(self.repo.revisions)
 
        to_add = [FileNode(node.path, content=node.content)
 
            for node in self.nodes]
 
        for node in to_add:
 
            self.imc.add(node)
 
        message = 'Added: %s' % ', '.join((node.path for node in self.nodes))
 
        author = str(self.__class__)
 
        changeset = self.imc.commit(message=message, author=author)
 

	
 
        newtip = self.repo.get_changeset()
 
        assert changeset == newtip
 
        assert rev_count + 1 == len(self.repo.revisions)
 
        assert newtip.message == message
 
        assert newtip.author == author
 
        assert not any((
 
            self.imc.added,
 
            self.imc.changed,
 
            self.imc.removed
 
        ))
 
        for node in to_add:
 
            assert newtip.get_node(node.path).content == node.content
 

	
 
    def test_add_in_bulk(self):
 
        rev_count = len(self.repo.revisions)
 
        to_add = [FileNode(node.path, content=node.content)
 
            for node in self.nodes]
 
        self.imc.add(*to_add)
 
        message = 'Added: %s' % ', '.join((node.path for node in self.nodes))
 
        author = str(self.__class__)
 
        changeset = self.imc.commit(message=message, author=author)
 

	
 
        newtip = self.repo.get_changeset()
 
        assert changeset == newtip
 
        assert rev_count + 1 == len(self.repo.revisions)
 
        assert newtip.message == message
 
        assert newtip.author == author
 
        assert not any((
 
            self.imc.added,
 
            self.imc.changed,
 
            self.imc.removed
 
        ))
 
        for node in to_add:
 
            assert newtip.get_node(node.path).content == node.content
 

	
 
    def test_add_actually_adds_all_nodes_at_second_commit_too(self):
 
        self.imc.add(FileNode('foo/bar/image.png', content='\0'))
 
        self.imc.add(FileNode('foo/README.txt', content='readme!'))
 
        changeset = self.imc.commit('Initial', 'joe.doe@example.com')
 
        assert isinstance(changeset.get_node('foo'), DirNode)
 
        assert isinstance(changeset.get_node('foo/bar'), DirNode)
 
        assert changeset.get_node('foo/bar/image.png').content == b'\0'
 
        assert changeset.get_node('foo/README.txt').content == b'readme!'
 

	
 
        # commit some more files again
 
        to_add = [
 
            FileNode('foo/bar/foobaz/bar', content='foo'),
 
            FileNode('foo/bar/another/bar', content='foo'),
 
            FileNode('foo/baz.txt', content='foo'),
 
            FileNode('foobar/foobaz/file', content='foo'),
 
            FileNode('foobar/barbaz', content='foo'),
 
        ]
 
        self.imc.add(*to_add)
 
        changeset = self.imc.commit('Another', 'joe.doe@example.com')
 
        assert changeset.get_node('foo/bar/foobaz/bar').content == b'foo'
 
        assert changeset.get_node('foo/bar/another/bar').content == b'foo'
 
        assert changeset.get_node('foo/baz.txt').content == b'foo'
 
        assert changeset.get_node('foobar/foobaz/file').content == b'foo'
 
        assert changeset.get_node('foobar/barbaz').content == b'foo'
 

	
 
    def test_add_non_ascii_files(self):
 
        rev_count = len(self.repo.revisions)
 
        to_add = [
 
            FileNode('żółwik/zwierzątko', content='ćććć'),
 
            FileNode('żółwik/zwierzątko_uni', content='ćććć'),
 
        ]
 
        for node in to_add:
 
            self.imc.add(node)
 
        message = 'Added: %s' % ', '.join((node.path for node in self.nodes))
 
        author = str(self.__class__)
 
        changeset = self.imc.commit(message=message, author=author)
 

	
 
        newtip = self.repo.get_changeset()
 
        assert changeset == newtip
 
        assert rev_count + 1 == len(self.repo.revisions)
 
        assert newtip.message == message
 
        assert newtip.author == author
 
        assert not any((
 
            self.imc.added,
 
            self.imc.changed,
 
            self.imc.removed
 
        ))
 
        for node in to_add:
 
            assert newtip.get_node(node.path).content == node.content
 

	
 
    def test_add_raise_already_added(self):
 
        node = FileNode('foobar', content='baz')
 
        self.imc.add(node)
 
        with pytest.raises(NodeAlreadyAddedError):
 
            self.imc.add(node)
 

	
 
    def test_check_integrity_raise_already_exist(self):
 
        node = FileNode('foobar', content='baz')
 
        self.imc.add(node)
 
        self.imc.commit(message='Added foobar', author=str(self))
 
        self.imc.add(node)
 
        with pytest.raises(NodeAlreadyExistsError):
 
            self.imc.commit(message='new message',
 
                            author=str(self))
 

	
 
    def test_change(self):
 
        self.imc.add(FileNode('foo/bar/baz', content='foo'))
 
        self.imc.add(FileNode('foo/fbar', content='foobar'))
 
        tip = self.imc.commit('Initial', 'joe.doe@example.com')
 

	
 
        # Change node's content
 
        node = FileNode('foo/bar/baz', content='My **changed** content')
 
        self.imc.change(node)
 
        self.imc.commit('Changed %s' % node.path, 'joe.doe@example.com')
 

	
 
        newtip = self.repo.get_changeset()
 
        assert tip != newtip
 
        assert tip.raw_id != newtip.raw_id
 
        assert newtip.get_node('foo/bar/baz').content == b'My **changed** content'
 

	
 
    def test_change_non_ascii(self):
 
        to_add = [
 
            FileNode('żółwik/zwierzątko', content='ćććć'),
 
            FileNode('żółwik/zwierzątko_uni', content='ćććć'),
 
        ]
 
        for node in to_add:
 
            self.imc.add(node)
 

	
 
        tip = self.imc.commit('Initial', 'joe.doe@example.com')
 

	
 
        # Change node's content
 
        node = FileNode('żółwik/zwierzątko', content='My **changed** content')
 
        self.imc.change(node)
 
        self.imc.commit('Changed %s' % node.path, 'joe.doe@example.com')
 

	
 
        node = FileNode('żółwik/zwierzątko_uni', content='My **changed** content')
 
        self.imc.change(node)
 
        self.imc.commit('Changed %s' % node.path, 'joe.doe@example.com')
 

	
 
        newtip = self.repo.get_changeset()
 
        assert tip != newtip
 
        assert tip.raw_id != newtip.raw_id
 

	
 
        assert newtip.get_node('żółwik/zwierzątko').content == b'My **changed** content'
 
        assert newtip.get_node('żółwik/zwierzątko_uni').content == b'My **changed** content'
 

	
 
    def test_change_raise_empty_repository(self):
 
        node = FileNode('foobar')
 
        with pytest.raises(EmptyRepositoryError):
 
            self.imc.change(node)
 

	
 
    def test_check_integrity_change_raise_node_does_not_exist(self):
 
        node = FileNode('foobar', content='baz')
 
        self.imc.add(node)
 
        self.imc.commit(message='Added foobar', author=str(self))
 
        node = FileNode('not-foobar', content='')
 
        self.imc.change(node)
 
        with pytest.raises(NodeDoesNotExistError):
 
            self.imc.commit(message='Changed not existing node', author=str(self))
 

	
 
    def test_change_raise_node_already_changed(self):
 
        node = FileNode('foobar', content='baz')
 
        self.imc.add(node)
 
        self.imc.commit(message='Added foobar', author=str(self))
 
        node = FileNode('foobar', content='more baz')
 
        self.imc.change(node)
 
        with pytest.raises(NodeAlreadyChangedError):
 
            self.imc.change(node)
 

	
 
    def test_check_integrity_change_raise_node_not_changed(self):
 
        self.test_add()  # Performs first commit
 

	
 
        node = FileNode(self.nodes[0].path, content=self.nodes[0].content)
 
        self.imc.change(node)
 
        with pytest.raises(NodeNotChangedError):
 
            self.imc.commit(
 
                message='Trying to mark node as changed without touching it',
 
                author=str(self),
 
            )
 

	
 
    def test_change_raise_node_already_removed(self):
 
        node = FileNode('foobar', content='baz')
 
        self.imc.add(node)
 
        self.imc.commit(message='Added foobar', author=str(self))
 
        self.imc.remove(FileNode('foobar'))
 
        with pytest.raises(NodeAlreadyRemovedError):
 
            self.imc.change(node)
 

	
 
    def test_remove(self):
 
        self.test_add()  # Performs first commit
 

	
 
        tip = self.repo.get_changeset()
 
        node = self.nodes[0]
 
        assert node.content == tip.get_node(node.path).content
 
        self.imc.remove(node)
 
        self.imc.commit(message='Removed %s' % node.path, author=str(self))
 

	
 
        newtip = self.repo.get_changeset()
 
        assert tip != newtip
 
        assert tip.raw_id != newtip.raw_id
 
        with pytest.raises(NodeDoesNotExistError):
 
            newtip.get_node(node.path)
 

	
 
    def test_remove_last_file_from_directory(self):
 
        node = FileNode('omg/qwe/foo/bar', content='foobar')
 
        self.imc.add(node)
 
        self.imc.commit('added', 'joe doe')
 

	
 
        self.imc.remove(node)
 
        tip = self.imc.commit('removed', 'joe doe')
 
        with pytest.raises(NodeDoesNotExistError):
 
            tip.get_node('omg/qwe/foo/bar')
 

	
 
    def test_remove_raise_node_does_not_exist(self):
 
        self.imc.remove(self.nodes[0])
 
        with pytest.raises(NodeDoesNotExistError):
 
            self.imc.commit(
 
                message='Trying to remove node at empty repository',
 
                author=str(self),
 
            )
 

	
 
    def test_check_integrity_remove_raise_node_does_not_exist(self):
 
        self.test_add()  # Performs first commit
 

	
 
        node = FileNode('no-such-file')
 
        self.imc.remove(node)
 
        with pytest.raises(NodeDoesNotExistError):
 
            self.imc.commit(
 
                message='Trying to remove not existing node',
 
                author=str(self),
 
            )
 

	
 
    def test_remove_raise_node_already_removed(self):
 
        self.test_add() # Performs first commit
 

	
 
        node = FileNode(self.nodes[0].path)
 
        self.imc.remove(node)
 
        with pytest.raises(NodeAlreadyRemovedError):
 
            self.imc.remove(node)
 

	
 
    def test_remove_raise_node_already_changed(self):
 
        self.test_add()  # Performs first commit
 

	
 
        node = FileNode(self.nodes[0].path, content='Bending time')
 
        self.imc.change(node)
 
        with pytest.raises(NodeAlreadyChangedError):
 
            self.imc.remove(node)
 

	
 
    def test_reset(self):
 
        self.imc.add(FileNode('foo', content='bar'))
 
        #self.imc.change(FileNode('baz', content='new'))
 
        #self.imc.remove(FileNode('qwe'))
 
        self.imc.reset()
 
        assert not any((
 
            self.imc.added,
 
            self.imc.changed,
 
            self.imc.removed
 
        ))
 

	
 
    def test_multiple_commits(self):
 
        N = 3  # number of commits to perform
 
        last = None
 
        for x in range(N):
 
            fname = 'file%s' % str(x).rjust(5, '0')
 
            content = 'foobar\n' * x
 
            node = FileNode(fname, content=content)
 
            self.imc.add(node)
 
            commit = self.imc.commit("Commit no. %s" % (x + 1), author='vcs')
 
            assert last != commit
 
            last = commit
 

	
 
        # Check commit number for same repo
 
        assert len(self.repo.revisions) == N
 

	
 
        # Check commit number for recreated repo
 
        assert len(self.repo.revisions) == N
 

	
 
    def test_date_attr(self):
 
        node = FileNode('foobar.txt', content='Foobared!')
 
        self.imc.add(node)
 
        date = datetime.datetime(1985, 1, 30, 1, 45)
 
        commit = self.imc.commit("Committed at time when I was born ;-)",
 
            author='lb <lb@example.com>', date=date)
 

	
 
        assert commit.date == date
 

	
 

	
 
class TestGitInMemoryChangeset(InMemoryChangesetTestMixin):
 
    backend_alias = 'git'
 

	
 

	
 
class TestHgInMemoryChangeset(InMemoryChangesetTestMixin):
 
    backend_alias = 'hg'
scripts/whitespacecleanup.sh
Show inline comments
 
#!/bin/bash -x
 

	
 
# Enforce some consistency in whitespace - just to avoid spurious whitespaces changes
 

	
 
files=`hg mani | egrep -v '/fontello/|/email_templates/|(^LICENSE-MERGELY.html|^docs/Makefile|^scripts/whitespacecleanup.sh|/(graph|mergely|native.history)\.js|/test_dump_html_mails.ref.html|\.png|\.gif|\.ico|\.pot|\.po|\.mo|\.tar\.gz|\.diff)$'`
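 
# The sed passes below strip carriage returns (DOS line endings), expand tabs to four spaces,
 
# strip trailing whitespace, and normalize spacing before trailing backslash line continuations.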
 

	
 
sed -i "s/`printf '\r'`//g" $files
 
sed -i -e "s,`printf '\t'`,    ,g" $files
 
sed -i -e "s,  *$,,g" $files
 
sed -i -e 's,\([^ ]\)\\$,\1 \\,g' -e 's,\(["'"'"']["'"'"']["'"'"']\) \\$,\1\\,g' $files
 
# ensure one trailing newline - remove empty last line and make last line include trailing newline:
 
sed -i -e '$,${/^$/d}' -e '$a\' $files
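 
# CSS files only: ensure a space before '{' and after commas in selector lists.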
 

	
 
sed -i -e 's,\([^ /]\){,\1 {,g' `hg loc '*.css'`
 
sed -i -e 's|^\([^ /].*,\)\([^ ]\)|\1 \2|g' `hg loc '*.css'`
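 
# Normalize executable bits: clear them on all tracked files, then re-add them only for non-binary files with a shebang.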
 

	
 
hg mani | xargs chmod -x
 
hg loc 'set:!binary()&grep("^#!")&!(**_tmpl.py)&!(**/template**)' | xargs chmod +x
 

	
 
# isort is installed from dev_requirements.txt
 
hg loc 'set:!binary()&grep("^#!.*python")' '*.py' | xargs isort --line-width 160 --wrap-length 160 --lines-after-imports 2
 
hg loc 'set:!binary()&grep("^#!.*python")' '*.py' | xargs isort --line-width 160 --lines-after-imports 2
 

	
 
hg diff
0 comments (0 inline, 0 general)