Changeset - c5829bc7d090
kallithea/tests/api/api_base.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
"""
 
Tests for the API. Run with::
 

	
 
    KALLITHEA_WHOOSH_TEST_DISABLE=1 nosetests --with-coverage --cover-package=kallithea.controllers.api.api -x kallithea/tests/api
 
"""
 

	
 
from __future__ import with_statement
 
import os
 
import random
 
import mock
 

	
 
from kallithea.tests import *
 
from kallithea.tests.fixture import Fixture
 
from kallithea.lib.compat import json
 
from kallithea.lib.auth import AuthUser
 
from kallithea.model.user import UserModel
 
from kallithea.model.user_group import UserGroupModel
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.repo_group import RepoGroupModel
 
from kallithea.model.meta import Session
 
from kallithea.model.scm import ScmModel
 
from kallithea.model.gist import GistModel
 
from kallithea.model.db import Repository, User, Setting
 
from kallithea.lib.utils2 import time_to_datetime
 

	
 

	
 
API_URL = '/_admin/api'
 
TEST_USER_GROUP = 'test_user_group'
 
TEST_REPO_GROUP = 'test_repo_group'
 

	
 
fixture = Fixture()
 

	
 

	
 
def _build_data(apikey, method, **kw):
 
    """
 
    Builds API data with given random ID
 

	
 
    :param random_id:
 
    """
 
    random_id = random.randrange(1, 9999)
 
    return random_id, json.dumps({
 
        "id": random_id,
 
        "api_key": apikey,
 
        "method": method,
 
        "args": kw
 
    })
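For illustration, a minimal sketch of what this helper returns, using a made-up API key:

    id_, body = _build_data('some-api-key', 'get_user', userid='joe')
    # json.loads(body) == {'id': id_, 'api_key': 'some-api-key',
    #                      'method': 'get_user', 'args': {'userid': 'joe'}}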
 

	
 

	
 
jsonify = lambda obj: json.loads(json.dumps(obj))
 

	
 

	
 
def crash(*args, **kwargs):
 
    raise Exception('Total Crash !')
 

	
 

	
 
def api_call(test_obj, params):
 
    response = test_obj.app.post(API_URL, content_type='application/json',
 
                                 params=params)
 
    return response
 

	
 

	
 
## helpers
 
def make_user_group(name=TEST_USER_GROUP):
 
    gr = fixture.create_user_group(name, cur_user=TEST_USER_ADMIN_LOGIN)
 
    UserGroupModel().add_user_to_group(user_group=gr,
 
                                       user=TEST_USER_ADMIN_LOGIN)
 
    Session().commit()
 
    return gr
 

	
 

	
 
def make_repo_group(name=TEST_REPO_GROUP):
 
    gr = fixture.create_repo_group(name, cur_user=TEST_USER_ADMIN_LOGIN)
 
    Session().commit()
 
    return gr
 

	
 

	
 
class BaseTestApi(object):
 
class _BaseTestApi(object):
 
    REPO = None
 
    REPO_TYPE = None
 

	
 
    @classmethod
 
    def setup_class(cls):
 
        cls.usr = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
 
        cls.apikey = cls.usr.api_key
 
        cls.test_user = UserModel().create_or_update(
 
            username='test-api',
 
            password='test',
 
            email='test@example.com',
 
            firstname='first',
 
            lastname='last'
 
        )
 
        Session().commit()
 
        cls.TEST_USER_LOGIN = cls.test_user.username
 
        cls.apikey_regular = cls.test_user.api_key
 

	
 
    @classmethod
 
    def teardown_class(cls):
 
        pass
 

	
 
    def setUp(self):
 
        self.maxDiff = None
 
        make_user_group()
 
        make_repo_group()
 

	
 
    def tearDown(self):
 
        fixture.destroy_user_group(TEST_USER_GROUP)
 
        fixture.destroy_gists()
 
        fixture.destroy_repo_group(TEST_REPO_GROUP)
 

	
 
    def _compare_ok(self, id_, expected, given):
 
        expected = jsonify({
 
            'id': id_,
 
            'error': None,
 
            'result': expected
 
        })
 
        given = json.loads(given)
 
        self.assertEqual(expected, given)
 

	
 
    def _compare_error(self, id_, expected, given):
 
        expected = jsonify({
 
            'id': id_,
 
            'error': expected,
 
            'result': None
 
        })
 
        given = json.loads(given)
 
        self.assertEqual(expected, given)
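Both comparison helpers pin down the JSON-RPC envelope the API controller answers with. As a rough sketch, with placeholder values standing in for the real request id and payload:

    ok_envelope = {'id': 123, 'error': None, 'result': {'some': 'payload'}}
    err_envelope = {'id': 123, 'error': 'some error message', 'result': None}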
 

	
 
    def test_Optional_object(self):
 
        from kallithea.controllers.api.api import Optional
 

	
 
        option1 = Optional(None)
 
        self.assertEqual('<Optional:%s>' % None, repr(option1))
 
        self.assertEqual(option1(), None)
 

	
 
        self.assertEqual(1, Optional.extract(Optional(1)))
 
        self.assertEqual('trololo', Optional.extract('trololo'))
 

	
 
    def test_Optional_OAttr(self):
 
        from kallithea.controllers.api.api import Optional, OAttr
 

	
 
        option1 = Optional(OAttr('apiuser'))
 
        self.assertEqual('apiuser', Optional.extract(option1))
 

	
 
    def test_OAttr_object(self):
 
        from kallithea.controllers.api.api import OAttr
 

	
 
        oattr1 = OAttr('apiuser')
 
        self.assertEqual('<OptionalAttr:apiuser>', repr(oattr1))
 
        self.assertEqual(oattr1(), oattr1)
 

	
 
    def test_api_wrong_key(self):
 
        id_, params = _build_data('trololo', 'get_user')
 
        response = api_call(self, params)
 

	
 
        expected = 'Invalid API KEY'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_missing_non_optional_param(self):
 
        id_, params = _build_data(self.apikey, 'get_repo')
 
        response = api_call(self, params)
 

	
 
        expected = 'Missing non optional `repoid` arg in JSON DATA'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_missing_non_optional_param_args_null(self):
 
        id_, params = _build_data(self.apikey, 'get_repo')
 
        params = params.replace('"args": {}', '"args": null')
 
        response = api_call(self, params)
 

	
 
        expected = 'Missing non optional `repoid` arg in JSON DATA'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_missing_non_optional_param_args_bad(self):
 
        id_, params = _build_data(self.apikey, 'get_repo')
 
        params = params.replace('"args": {}', '"args": 1')
 
        response = api_call(self, params)
 

	
 
        expected = 'Missing non optional `repoid` arg in JSON DATA'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_args_is_null(self):
 
        id_, params = _build_data(self.apikey, 'get_users', )
 
        params = params.replace('"args": {}', '"args": null')
 
        response = api_call(self, params)
 
        self.assertEqual(response.status, '200 OK')
 

	
 
    def test_api_args_is_bad(self):
 
        id_, params = _build_data(self.apikey, 'get_users', )
 
        params = params.replace('"args": {}', '"args": 1')
 
        response = api_call(self, params)
 
        self.assertEqual(response.status, '200 OK')
 

	
 
    def test_api_args_different_args(self):
 
        import string
 
        expected = {
 
            'ascii_letters': string.ascii_letters,
 
            'ws': string.whitespace,
 
            'printables': string.printable
 
        }
 
        id_, params = _build_data(self.apikey, 'test', args=expected)
 
        response = api_call(self, params)
 
        self.assertEqual(response.status, '200 OK')
 
        self._compare_ok(id_, expected, response.body)
 

	
 
    def test_api_get_users(self):
 
        id_, params = _build_data(self.apikey, 'get_users', )
 
        response = api_call(self, params)
 
        ret_all = []
 
        _users = User.query().filter(User.username != User.DEFAULT_USER) \
 
            .order_by(User.username).all()
 
        for usr in _users:
 
            ret = usr.get_api_data()
 
            ret_all.append(jsonify(ret))
 
        expected = ret_all
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_user(self):
 
        id_, params = _build_data(self.apikey, 'get_user',
 
                                  userid=TEST_USER_ADMIN_LOGIN)
 
        response = api_call(self, params)
 

	
 
        usr = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
 
        ret = usr.get_api_data()
 
        ret['permissions'] = AuthUser(usr.user_id).permissions
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_that_does_not_exist(self):
 
        id_, params = _build_data(self.apikey, 'get_user',
 
                                  userid='trololo')
 
        response = api_call(self, params)
 

	
 
        expected = "user `%s` does not exist" % 'trololo'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_without_giving_userid(self):
 
        id_, params = _build_data(self.apikey, 'get_user')
 
        response = api_call(self, params)
 

	
 
        usr = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
 
        ret = usr.get_api_data()
 
        ret['permissions'] = AuthUser(usr.user_id).permissions
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_without_giving_userid_non_admin(self):
 
        id_, params = _build_data(self.apikey_regular, 'get_user')
 
        response = api_call(self, params)
 

	
 
        usr = UserModel().get_by_username(self.TEST_USER_LOGIN)
 
        ret = usr.get_api_data()
 
        ret['permissions'] = AuthUser(usr.user_id).permissions
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_with_giving_userid_non_admin(self):
 
        id_, params = _build_data(self.apikey_regular, 'get_user',
 
                                  userid=self.TEST_USER_LOGIN)
 
        response = api_call(self, params)
 

	
 
        expected = 'userid is not the same as your user'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_pull(self):
 
        repo_name = 'test_pull'
 
        r = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        r.clone_uri = os.path.join(TESTS_TMP_PATH, self.REPO)
 
        Session.add(r)
 
        Session.commit()
 

	
 
        id_, params = _build_data(self.apikey, 'pull',
 
                                  repoid=repo_name,)
 
        response = api_call(self, params)
 

	
 
        expected = {'msg': 'Pulled from `%s`' % repo_name,
 
                    'repository': repo_name}
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
        fixture.destroy_repo(repo_name)
 

	
 
    def test_api_pull_error(self):
 
        id_, params = _build_data(self.apikey, 'pull',
 
                                  repoid=self.REPO, )
 
        response = api_call(self, params)
 

	
 
        expected = 'Unable to pull changes from `%s`' % self.REPO
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_rescan_repos(self):
 
        id_, params = _build_data(self.apikey, 'rescan_repos')
 
        response = api_call(self, params)
 

	
 
        expected = {'added': [], 'removed': []}
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(ScmModel, 'repo_scan', crash)
 
    def test_api_rescann_error(self):
 
        id_, params = _build_data(self.apikey, 'rescan_repos', )
 
        response = api_call(self, params)
 

	
 
        expected = 'Error occurred during rescan repositories action'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_invalidate_cache(self):
 
        repo = RepoModel().get_by_repo_name(self.REPO)
 
        repo.scm_instance_cached()  # seed cache
 

	
 
        id_, params = _build_data(self.apikey, 'invalidate_cache',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 

	
 
        expected = {
 
            'msg': "Cache for repository `%s` was invalidated" % (self.REPO,),
 
            'repository': self.REPO
 
        }
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(ScmModel, 'mark_for_invalidation', crash)
 
    def test_api_invalidate_cache_error(self):
 
        id_, params = _build_data(self.apikey, 'invalidate_cache',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 

	
 
        expected = 'Error occurred during cache invalidation action'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_invalidate_cache_regular_user_no_permission(self):
 
        repo = RepoModel().get_by_repo_name(self.REPO)
 
        repo.scm_instance_cached() # seed cache
 

	
 
        id_, params = _build_data(self.apikey_regular, 'invalidate_cache',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 

	
 
        expected = "repository `%s` does not exist" % (self.REPO,)
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_lock_repo_lock_aquire(self):
 
        id_, params = _build_data(self.apikey, 'lock',
 
                                  userid=TEST_USER_ADMIN_LOGIN,
 
                                  repoid=self.REPO,
 
                                  locked=True)
 
        response = api_call(self, params)
 
        expected = {
 
            'repo': self.REPO, 'locked': True,
 
            'locked_since': response.json['result']['locked_since'],
 
            'locked_by': TEST_USER_ADMIN_LOGIN,
 
            'lock_state_changed': True,
 
            'msg': ('User `%s` set lock state for repo `%s` to `%s`'
 
                    % (TEST_USER_ADMIN_LOGIN, self.REPO, True))
 
        }
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_lock_repo_lock_aquire_by_non_admin(self):
 
        repo_name = 'api_delete_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE,
 
                            cur_user=self.TEST_USER_LOGIN)
 
        try:
 
            id_, params = _build_data(self.apikey_regular, 'lock',
 
                                      repoid=repo_name,
 
                                      locked=True)
 
            response = api_call(self, params)
 
            expected = {
 
                'repo': repo_name,
 
                'locked': True,
 
                'locked_since': response.json['result']['locked_since'],
 
                'locked_by': self.TEST_USER_LOGIN,
 
                'lock_state_changed': True,
 
                'msg': ('User `%s` set lock state for repo `%s` to `%s`'
 
                        % (self.TEST_USER_LOGIN, repo_name, True))
 
            }
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_lock_repo_lock_aquire_non_admin_with_userid(self):
 
        repo_name = 'api_delete_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE,
 
                            cur_user=self.TEST_USER_LOGIN)
 
        try:
 
            id_, params = _build_data(self.apikey_regular, 'lock',
 
                                      userid=TEST_USER_ADMIN_LOGIN,
 
                                      repoid=repo_name,
 
                                      locked=True)
 
            response = api_call(self, params)
 
            expected = 'userid is not the same as your user'
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_lock_repo_lock_aquire_non_admin_not_his_repo(self):
 
        id_, params = _build_data(self.apikey_regular, 'lock',
 
                                  repoid=self.REPO,
 
                                  locked=True)
 
        response = api_call(self, params)
 
        expected = 'repository `%s` does not exist' % (self.REPO)
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_lock_repo_lock_release(self):
 
        id_, params = _build_data(self.apikey, 'lock',
 
                                  userid=TEST_USER_ADMIN_LOGIN,
 
                                  repoid=self.REPO,
 
                                  locked=False)
 
        response = api_call(self, params)
 
        expected = {
 
            'repo': self.REPO,
 
            'locked': False,
 
            'locked_since': None,
 
            'locked_by': TEST_USER_ADMIN_LOGIN,
 
            'lock_state_changed': True,
 
            'msg': ('User `%s` set lock state for repo `%s` to `%s`'
 
                    % (TEST_USER_ADMIN_LOGIN, self.REPO, False))
 
        }
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_lock_repo_lock_aquire_optional_userid(self):
 
        id_, params = _build_data(self.apikey, 'lock',
 
                                  repoid=self.REPO,
 
                                  locked=True)
 
        response = api_call(self, params)
 
        time_ = response.json['result']['locked_since']
 
        expected = {
 
            'repo': self.REPO,
 
            'locked': True,
 
            'locked_since': time_,
 
            'locked_by': TEST_USER_ADMIN_LOGIN,
 
            'lock_state_changed': True,
 
            'msg': ('User `%s` set lock state for repo `%s` to `%s`'
 
                    % (TEST_USER_ADMIN_LOGIN, self.REPO, True))
 
        }
 

	
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_lock_repo_lock_optional_locked(self):
 
        id_, params = _build_data(self.apikey, 'lock',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 
        time_ = response.json['result']['locked_since']
 
        expected = {
 
            'repo': self.REPO,
 
            'locked': True,
 
            'locked_since': time_,
 
            'locked_by': TEST_USER_ADMIN_LOGIN,
 
            'lock_state_changed': False,
 
            'msg': ('Repo `%s` locked by `%s` on `%s`.'
 
                    % (self.REPO, TEST_USER_ADMIN_LOGIN,
 
                       json.dumps(time_to_datetime(time_))))
 
        }
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_lock_repo_lock_optional_not_locked(self):
 
        repo_name = 'api_not_locked'
 
        repo = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE,
 
                            cur_user=self.TEST_USER_LOGIN)
 
        self.assertEqual(repo.locked, [None, None])
 
        try:
 
            id_, params = _build_data(self.apikey, 'lock',
 
                                      repoid=repo.repo_id)
kallithea/tests/api/test_api_git.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
from kallithea.tests import *
 
from kallithea.tests.api.api_base import BaseTestApi
 
from kallithea.tests import TestController, GIT_REPO
 
from kallithea.tests.api.api_base import _BaseTestApi
 

	
 

	
 
class TestGitApi(BaseTestApi, TestController):
 
class TestGitApi(_BaseTestApi, TestController):
 
    REPO = GIT_REPO
 
    REPO_TYPE = 'git'
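The leading underscore marks the shared base as an abstract helper rather than a test class in its own right; only the concrete per-backend classes that also mix in TestController are meant to run. Should another backend ever need the same API coverage, it would presumably follow the same shape (the names below are hypothetical placeholders):

    class TestOtherApi(_BaseTestApi, TestController):
        REPO = 'vcs_test_other'   # hypothetical fixture repository
        REPO_TYPE = 'other'       # hypothetical backend alias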
kallithea/tests/api/test_api_hg.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
from kallithea.tests import *
 
from kallithea.tests.api.api_base import BaseTestApi
 
from kallithea.tests import TestController, HG_REPO
 
from kallithea.tests.api.api_base import _BaseTestApi
 

	
 

	
 
class TestHgApi(BaseTestApi, TestController):
 
class TestHgApi(_BaseTestApi, TestController):
 
    REPO = HG_REPO
 
    REPO_TYPE = 'hg'
kallithea/tests/functional/test_admin_repos.py
 
# -*- coding: utf-8 -*-
 

	
 
import os
 
import mock
 
import urllib
 

	
 
from kallithea.lib import vcs
 
from kallithea.model.db import Repository, RepoGroup, UserRepoToPerm, User,\
 
    Permission
 
from kallithea.model.user import UserModel
 
from kallithea.tests import *
 
from kallithea.model.repo_group import RepoGroupModel
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.meta import Session
 
from kallithea.tests.fixture import Fixture, error_function
 

	
 
fixture = Fixture()
 

	
 

	
 
def _get_permission_for_user(user, repo):
 
    perm = UserRepoToPerm.query()\
 
                .filter(UserRepoToPerm.repository ==
 
                        Repository.get_by_repo_name(repo))\
 
                .filter(UserRepoToPerm.user == User.get_by_username(user))\
 
                .all()
 
    return perm
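A quick usage sketch, mirroring the default-permission assertions made further down (the repository name is a stand-in for self.REPO):

    perm = _get_permission_for_user(user='default', repo='some_repo')
    # each entry is a UserRepoToPerm row; the granted permission name is
    # available as perm[0].permission.permission_name, e.g. 'repository.read'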
 

	
 

	
 
class _BaseTest(TestController):
 
class _BaseTest(object):
 
    """
 
    Write all tests here
 
    """
 
    REPO = None
 
    REPO_TYPE = None
 
    NEW_REPO = None
 
    OTHER_TYPE_REPO = None
 
    OTHER_TYPE = None
 

	
 
    @classmethod
 
    def setup_class(cls):
 
        pass
 

	
 
    @classmethod
 
    def teardown_class(cls):
 
        pass
 

	
 
    def test_index(self):
 
        self.log_user()
 
        response = self.app.get(url('repos'))
 

	
 
    def test_create(self):
 
        self.log_user()
 
        repo_name = self.NEW_REPO
 
        description = 'description for newly created repo'
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description,
 
                                                _authentication_token=self.authentication_token()))
 
        ## run the check page that triggers the flash message
 
        response = self.app.get(url('repo_check_home', repo_name=repo_name))
 
        self.assertEqual(response.json, {u'result': True})
 
        self.checkSessionFlash(response,
 
                               'Created repository <a href="/%s">%s</a>'
 
                               % (repo_name, repo_name))
 

	
 
        # test if the repo was created in the database
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name).one()
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name)
 
        self.assertEqual(new_repo.description, description)
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(url('summary_home', repo_name=repo_name))
 
        response.mustcontain(repo_name)
 
        response.mustcontain(self.REPO_TYPE)
 

	
 
        # test if the repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name))
 
        except vcs.exceptions.VCSError:
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
        RepoModel().delete(repo_name)
 
        Session().commit()
 

	
 
    def test_create_non_ascii(self):
 
        self.log_user()
 
        non_ascii = "ąęł"
 
        repo_name = "%s%s" % (self.NEW_REPO, non_ascii)
 
        repo_name_unicode = repo_name.decode('utf8')
 
        description = 'description for newly created repo' + non_ascii
 
        description_unicode = description.decode('utf8')
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description,
 
                                                _authentication_token=self.authentication_token()))
 
        ## run the check page that triggers the flash message
 
        response = self.app.get(url('repo_check_home', repo_name=repo_name))
 
        self.assertEqual(response.json, {u'result': True})
 
        self.checkSessionFlash(response,
 
                               u'Created repository <a href="/%s">%s</a>'
 
                               % (urllib.quote(repo_name), repo_name_unicode))
 
        # test if the repo was created in the database
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name_unicode).one()
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name_unicode)
 
        self.assertEqual(new_repo.description, description_unicode)
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(url('summary_home', repo_name=repo_name))
 
        response.mustcontain(repo_name)
 
        response.mustcontain(self.REPO_TYPE)
 

	
 
        # test if the repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name))
 
        except vcs.exceptions.VCSError:
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
    def test_create_in_group(self):
 
        self.log_user()
 

	
 
        ## create GROUP
 
        group_name = 'sometest_%s' % self.REPO_TYPE
 
        gr = RepoGroupModel().create(group_name=group_name,
 
                                     group_description='test',
 
                                     owner=TEST_USER_ADMIN_LOGIN)
 
        Session().commit()
 

	
 
        repo_name = 'ingroup'
 
        repo_name_full = RepoGroup.url_sep().join([group_name, repo_name])
 
        description = 'description for newly created repo'
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description,
 
                                                repo_group=gr.group_id,
 
                                                _authentication_token=self.authentication_token()))
 
        ## run the check page that triggers the flash message
 
        response = self.app.get(url('repo_check_home', repo_name=repo_name_full))
 
        self.assertEqual(response.json, {u'result': True})
 
        self.checkSessionFlash(response,
 
                               'Created repository <a href="/%s">%s</a>'
 
                               % (repo_name_full, repo_name_full))
 
        # test if the repo was created in the database
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name_full).one()
 
        new_repo_id = new_repo.repo_id
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name_full)
 
        self.assertEqual(new_repo.description, description)
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(url('summary_home', repo_name=repo_name_full))
 
        response.mustcontain(repo_name_full)
 
        response.mustcontain(self.REPO_TYPE)
 

	
 
        inherited_perms = UserRepoToPerm.query()\
 
            .filter(UserRepoToPerm.repository_id == new_repo_id).all()
 
        self.assertEqual(len(inherited_perms), 1)
 

	
 
        # test if the repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name_full))
 
        except vcs.exceptions.VCSError:
 
            RepoGroupModel().delete(group_name)
 
            Session().commit()
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
        RepoModel().delete(repo_name_full)
 
        RepoGroupModel().delete(group_name)
 
        Session().commit()
 

	
 
    def test_create_in_group_without_needed_permissions(self):
 
        usr = self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
 
        # avoid spurious RepoGroup DetachedInstanceError ...
 
        authentication_token = self.authentication_token()
 
        # revoke
 
        user_model = UserModel()
 
        # disable fork and create on default user
 
        user_model.revoke_perm(User.DEFAULT_USER, 'hg.create.repository')
 
        user_model.grant_perm(User.DEFAULT_USER, 'hg.create.none')
 
        user_model.revoke_perm(User.DEFAULT_USER, 'hg.fork.repository')
 
        user_model.grant_perm(User.DEFAULT_USER, 'hg.fork.none')
 

	
 
        # disable on regular user
 
        user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.repository')
 
        user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.none')
 
        user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.repository')
 
        user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.none')
 
        Session().commit()
 

	
 
        ## create GROUP
 
        group_name = 'reg_sometest_%s' % self.REPO_TYPE
 
        gr = RepoGroupModel().create(group_name=group_name,
 
                                     group_description='test',
 
                                     owner=TEST_USER_ADMIN_LOGIN)
 
        Session().commit()
 

	
 
        group_name_allowed = 'reg_sometest_allowed_%s' % self.REPO_TYPE
 
        gr_allowed = RepoGroupModel().create(group_name=group_name_allowed,
 
                                     group_description='test',
 
                                     owner=TEST_USER_REGULAR_LOGIN)
 
        Session().commit()
 

	
 
        repo_name = 'ingroup'
 
        repo_name_full = RepoGroup.url_sep().join([group_name, repo_name])
 
        description = 'description for newly created repo'
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description,
 
                                                repo_group=gr.group_id,
 
                                                _authentication_token=authentication_token))
 

	
 
        response.mustcontain('Invalid value')
 

	
 
        # user is allowed to create in this group
 
        repo_name = 'ingroup'
 
        repo_name_full = RepoGroup.url_sep().join([group_name_allowed, repo_name])
 
        description = 'description for newly created repo'
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description,
 
                                                repo_group=gr_allowed.group_id,
 
                                                _authentication_token=authentication_token))
 

	
 
        ## run the check page that triggers the flash message
 
        response = self.app.get(url('repo_check_home', repo_name=repo_name_full))
 
        self.assertEqual(response.json, {u'result': True})
 
        self.checkSessionFlash(response,
 
                               'Created repository <a href="/%s">%s</a>'
 
                               % (repo_name_full, repo_name_full))
 
        # test if the repo was created in the database
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name_full).one()
 
        new_repo_id = new_repo.repo_id
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name_full)
 
        self.assertEqual(new_repo.description, description)
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(url('summary_home', repo_name=repo_name_full))
 
        response.mustcontain(repo_name_full)
 
        response.mustcontain(self.REPO_TYPE)
 

	
 
        inherited_perms = UserRepoToPerm.query()\
 
            .filter(UserRepoToPerm.repository_id == new_repo_id).all()
 
        self.assertEqual(len(inherited_perms), 1)
 

	
 
        # test if the repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name_full))
 
        except vcs.exceptions.VCSError:
 
            RepoGroupModel().delete(group_name)
 
            Session().commit()
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
        RepoModel().delete(repo_name_full)
 
        RepoGroupModel().delete(group_name)
 
        RepoGroupModel().delete(group_name_allowed)
 
        Session().commit()
 

	
 
    def test_create_in_group_inherit_permissions(self):
 
        self.log_user()
 

	
 
        ## create GROUP
 
        group_name = 'sometest_%s' % self.REPO_TYPE
 
        gr = RepoGroupModel().create(group_name=group_name,
 
                                     group_description='test',
 
                                     owner=TEST_USER_ADMIN_LOGIN)
 
        perm = Permission.get_by_key('repository.write')
 
        RepoGroupModel().grant_user_permission(gr, TEST_USER_REGULAR_LOGIN, perm)
 

	
 
        ## add repo permissions
 
        Session().commit()
 

	
 
        repo_name = 'ingroup_inherited_%s' % self.REPO_TYPE
 
        repo_name_full = RepoGroup.url_sep().join([group_name, repo_name])
 
        description = 'description for newly created repo'
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description,
 
                                                repo_group=gr.group_id,
 
                                                repo_copy_permissions=True,
 
                                                _authentication_token=self.authentication_token()))
 

	
 
        ## run the check page that triggers the flash message
 
        response = self.app.get(url('repo_check_home', repo_name=repo_name_full))
 
        self.checkSessionFlash(response,
 
                               'Created repository <a href="/%s">%s</a>'
 
                               % (repo_name_full, repo_name_full))
 
        # test if the repo was created in the database
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name_full).one()
 
        new_repo_id = new_repo.repo_id
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name_full)
 
        self.assertEqual(new_repo.description, description)
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(url('summary_home', repo_name=repo_name_full))
 
        response.mustcontain(repo_name_full)
 
        response.mustcontain(self.REPO_TYPE)
 

	
 
        # test if the repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name_full))
 
        except vcs.exceptions.VCSError:
 
            RepoGroupModel().delete(group_name)
 
            Session().commit()
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
        # check if inherited permissions are applied
 
        inherited_perms = UserRepoToPerm.query()\
 
            .filter(UserRepoToPerm.repository_id == new_repo_id).all()
 
        self.assertEqual(len(inherited_perms), 2)
 

	
 
        self.assertTrue(TEST_USER_REGULAR_LOGIN in [x.user.username
 
                                                    for x in inherited_perms])
 
        self.assertTrue('repository.write' in [x.permission.permission_name
 
                                               for x in inherited_perms])
 

	
 
        RepoModel().delete(repo_name_full)
 
        RepoGroupModel().delete(group_name)
 
        Session().commit()
 

	
 
    def test_create_remote_repo_wrong_clone_uri(self):
 
        self.log_user()
 
        repo_name = self.NEW_REPO
 
        description = 'description for newly created repo'
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description,
 
                                                clone_uri='http://127.0.0.1/repo',
 
                                                _authentication_token=self.authentication_token()))
 
        response.mustcontain('invalid clone URL')
 

	
 

	
 
    def test_create_remote_repo_wrong_clone_uri_hg_svn(self):
 
        self.log_user()
 
        repo_name = self.NEW_REPO
 
        description = 'description for newly created repo'
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description,
 
                                                clone_uri='svn+http://127.0.0.1/repo',
 
                                                _authentication_token=self.authentication_token()))
 
        response.mustcontain('invalid clone URL')
 

	
 

	
 
    def test_delete(self):
 
        self.log_user()
 
        repo_name = 'vcs_test_new_to_delete_%s' % self.REPO_TYPE
 
        description = 'description for newly created repo'
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_name=repo_name,
 
                                                repo_description=description,
 
                                                _authentication_token=self.authentication_token()))
 
        ## run the check page that triggers the flash message
 
        response = self.app.get(url('repo_check_home', repo_name=repo_name))
 
        self.checkSessionFlash(response,
 
                               'Created repository <a href="/%s">%s</a>'
 
                               % (repo_name, repo_name))
 
        # test if the repo was created in the database
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name).one()
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name)
 
        self.assertEqual(new_repo.description, description)
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(url('summary_home', repo_name=repo_name))
 
        response.mustcontain(repo_name)
 
        response.mustcontain(self.REPO_TYPE)
 

	
 
        # test if the repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name))
 
        except vcs.exceptions.VCSError:
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
        response = self.app.delete(url('repo', repo_name=repo_name))
 

	
 
        self.checkSessionFlash(response, 'Deleted repository %s' % (repo_name))
 

	
 
        response.follow()
 

	
 
        #check if repo was deleted from db
 
        deleted_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name).scalar()
 

	
 
        self.assertEqual(deleted_repo, None)
 

	
 
        self.assertEqual(os.path.isdir(os.path.join(TESTS_TMP_PATH, repo_name)),
 
                                  False)
 

	
 
    def test_delete_non_ascii(self):
 
        self.log_user()
 
        non_ascii = "ąęł"
 
        repo_name = "%s%s" % (self.NEW_REPO, non_ascii)
 
        repo_name_unicode = repo_name.decode('utf8')
 
        description = 'description for newly created repo' + non_ascii
 
        description_unicode = description.decode('utf8')
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description,
 
                                                _authentication_token=self.authentication_token()))
 
        ## run the check page that triggers the flash message
 
        response = self.app.get(url('repo_check_home', repo_name=repo_name))
 
        self.assertEqual(response.json, {u'result': True})
 
        self.checkSessionFlash(response,
 
                               u'Created repository <a href="/%s">%s</a>'
 
                               % (urllib.quote(repo_name), repo_name_unicode))
 
        # test if the repo was created in the database
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name_unicode).one()
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name_unicode)
 
        self.assertEqual(new_repo.description, description_unicode)
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(url('summary_home', repo_name=repo_name))
 
        response.mustcontain(repo_name)
 
        response.mustcontain(self.REPO_TYPE)
 

	
 
        # test if the repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name))
 
        except vcs.exceptions.VCSError:
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
        response = self.app.delete(url('repo', repo_name=repo_name))
 
        self.checkSessionFlash(response, 'Deleted repository %s' % (repo_name_unicode))
 
        response.follow()
 

	
 
        #check if repo was deleted from db
 
        deleted_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name_unicode).scalar()
 

	
 
        self.assertEqual(deleted_repo, None)
 

	
 
        self.assertEqual(os.path.isdir(os.path.join(TESTS_TMP_PATH, repo_name)),
 
                                  False)
 

	
 
    def test_delete_repo_with_group(self):
 
        #TODO:
 
        pass
 

	
 
    def test_delete_browser_fakeout(self):
 
        response = self.app.post(url('repo', repo_name=self.REPO),
 
                                 params=dict(_method='delete', _authentication_token=self.authentication_token()))
 

	
 
    def test_show(self):
 
        self.log_user()
 
        response = self.app.get(url('repo', repo_name=self.REPO))
 

	
 
    def test_edit(self):
 
        response = self.app.get(url('edit_repo', repo_name=self.REPO))
 

	
 
    def test_set_private_flag_sets_default_to_none(self):
 
        self.log_user()
 
        #initially repository perm should be read
 
        perm = _get_permission_for_user(user='default', repo=self.REPO)
 
        self.assertEqual(len(perm), 1)
 
        self.assertEqual(perm[0].permission.permission_name, 'repository.read')
 
        self.assertEqual(Repository.get_by_repo_name(self.REPO).private, False)
 

	
 
        response = self.app.put(url('repo', repo_name=self.REPO),
 
                        fixture._get_repo_create_params(repo_private=1,
 
                                                repo_name=self.REPO,
 
                                                repo_type=self.REPO_TYPE,
 
                                                user=TEST_USER_ADMIN_LOGIN,
 
                                                _authentication_token=self.authentication_token()))
 
        self.checkSessionFlash(response,
 
                               msg='Repository %s updated successfully' % (self.REPO))
 
        self.assertEqual(Repository.get_by_repo_name(self.REPO).private, True)
 

	
 
        #now the repo default permission should be None
 
        perm = _get_permission_for_user(user='default', repo=self.REPO)
 
        self.assertEqual(len(perm), 1)
 
        self.assertEqual(perm[0].permission.permission_name, 'repository.none')
 

	
 
        response = self.app.put(url('repo', repo_name=self.REPO),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=self.REPO,
 
                                                repo_type=self.REPO_TYPE,
 
                                                user=TEST_USER_ADMIN_LOGIN,
 
                                                _authentication_token=self.authentication_token()))
 
        self.checkSessionFlash(response,
 
                               msg='Repository %s updated successfully' % (self.REPO))
 
        self.assertEqual(Repository.get_by_repo_name(self.REPO).private, False)
 

	
 
        #we turn off private now the repo default permission should stay None
 
        perm = _get_permission_for_user(user='default', repo=self.REPO)
 
        self.assertEqual(len(perm), 1)
 
        self.assertEqual(perm[0].permission.permission_name, 'repository.none')
 

	
 
        #update this permission back
 
        perm[0].permission = Permission.get_by_key('repository.read')
 
        Session().add(perm[0])
 
        Session().commit()
 

	
 
    def test_set_repo_fork_has_no_self_id(self):
 
        self.log_user()
 
        repo = Repository.get_by_repo_name(self.REPO)
 
        response = self.app.get(url('edit_repo_advanced', repo_name=self.REPO))
 
        opt = """<option value="%s">vcs_test_git</option>""" % repo.repo_id
 
        response.mustcontain(no=[opt])
 

	
 
    def test_set_fork_of_other_repo(self):
 
        self.log_user()
 
        other_repo = 'other_%s' % self.REPO_TYPE
 
        fixture.create_repo(other_repo, repo_type=self.REPO_TYPE)
 
        repo = Repository.get_by_repo_name(self.REPO)
 
        repo2 = Repository.get_by_repo_name(other_repo)
 
        response = self.app.put(url('edit_repo_advanced_fork', repo_name=self.REPO),
 
                                params=dict(id_fork_of=repo2.repo_id, _authentication_token=self.authentication_token()))
 
        repo = Repository.get_by_repo_name(self.REPO)
 
        repo2 = Repository.get_by_repo_name(other_repo)
 
        self.checkSessionFlash(response,
 
            'Marked repo %s as fork of %s' % (repo.repo_name, repo2.repo_name))
 

	
 
        assert repo.fork == repo2
 
        response = response.follow()
 
        # check if given repo is selected
 

	
 
        opt = """<option value="%s" selected="selected">%s</option>""" % (
 
                    repo2.repo_id, repo2.repo_name)
 
        response.mustcontain(opt)
 

	
 
        fixture.destroy_repo(other_repo, forks='detach')
 

	
 
    def test_set_fork_of_other_type_repo(self):
 
        self.log_user()
 
        repo = Repository.get_by_repo_name(self.REPO)
 
        repo2 = Repository.get_by_repo_name(self.OTHER_TYPE_REPO)
 
        response = self.app.put(url('edit_repo_advanced_fork', repo_name=self.REPO),
 
                                params=dict(id_fork_of=repo2.repo_id, _authentication_token=self.authentication_token()))
 
        repo = Repository.get_by_repo_name(self.REPO)
 
        repo2 = Repository.get_by_repo_name(self.OTHER_TYPE_REPO)
 
        self.checkSessionFlash(response,
 
            'Cannot set repository as fork of repository with other type')
 

	
 
    def test_set_fork_of_none(self):
 
        self.log_user()
 
        ## mark it as None
 
        response = self.app.put(url('edit_repo_advanced_fork', repo_name=self.REPO),
 
                                params=dict(id_fork_of=None, _authentication_token=self.authentication_token()))
 
        repo = Repository.get_by_repo_name(self.REPO)
 
        repo2 = Repository.get_by_repo_name(self.OTHER_TYPE_REPO)
 
        self.checkSessionFlash(response,
 
                               'Marked repo %s as fork of %s'
 
                               % (repo.repo_name, "Nothing"))
 
        assert repo.fork is None
 

	
 
    def test_set_fork_of_same_repo(self):
 
        self.log_user()
 
        repo = Repository.get_by_repo_name(self.REPO)
 
        response = self.app.put(url('edit_repo_advanced_fork', repo_name=self.REPO),
 
                                params=dict(id_fork_of=repo.repo_id, _authentication_token=self.authentication_token()))
 
        self.checkSessionFlash(response,
 
                               'An error occurred during this operation')
 

	
 
    def test_create_on_top_level_without_permissions(self):
 
        usr = self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
 
        # revoke
 
        user_model = UserModel()
 
        # disable fork and create on default user
 
        user_model.revoke_perm(User.DEFAULT_USER, 'hg.create.repository')
 
        user_model.grant_perm(User.DEFAULT_USER, 'hg.create.none')
 
        user_model.revoke_perm(User.DEFAULT_USER, 'hg.fork.repository')
 
        user_model.grant_perm(User.DEFAULT_USER, 'hg.fork.none')
 

	
 
        # disable on regular user
 
        user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.repository')
 
        user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.none')
 
        user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.repository')
 
        user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.none')
 
        Session().commit()
 

	
 

	
 
        user = User.get(usr['user_id'])
 

	
 
        repo_name = self.NEW_REPO+'no_perms'
 
        description = 'description for newly created repo'
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description,
 
                                                _authentication_token=self.authentication_token()))
 

	
 
        response.mustcontain('no permission to create repository in root location')
 

	
 
        RepoModel().delete(repo_name)
 
        Session().commit()
 

	
 
    @mock.patch.object(RepoModel, '_create_filesystem_repo', error_function)
 
    def test_create_repo_when_filesystem_op_fails(self):
 
        self.log_user()
 
        repo_name = self.NEW_REPO
 
        description = 'description for newly created repo'
 

	
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description,
 
                                                _authentication_token=self.authentication_token()))
 

	
 
        self.checkSessionFlash(response,
 
                               'Error creating repository %s' % repo_name)
 
        # repo must not be in db
 
        repo = Repository.get_by_repo_name(repo_name)
 
        self.assertEqual(repo, None)
 

	
 
        # repo must not be in filesystem !
 
        self.assertFalse(os.path.isdir(os.path.join(TESTS_TMP_PATH, repo_name)))
 

	
 
class TestAdminReposControllerGIT(_BaseTest):
 
class TestAdminReposControllerGIT(TestController, _BaseTest):
 
    REPO = GIT_REPO
 
    REPO_TYPE = 'git'
 
    NEW_REPO = NEW_GIT_REPO
 
    OTHER_TYPE_REPO = HG_REPO
 
    OTHER_TYPE = 'hg'
 

	
 

	
 
class TestAdminReposControllerHG(_BaseTest):
 
class TestAdminReposControllerHG(TestController, _BaseTest):
 
    REPO = HG_REPO
 
    REPO_TYPE = 'hg'
 
    NEW_REPO = NEW_HG_REPO
 
    OTHER_TYPE_REPO = GIT_REPO
 
    OTHER_TYPE = 'git'
kallithea/tests/functional/test_forks.py
 
# -*- coding: utf-8 -*-
 

	
 
import unittest
 

	
 
from kallithea.tests import *
 
from kallithea.tests.fixture import Fixture
 

	
 
from kallithea.model.db import Repository
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.user import UserModel
 
from kallithea.model.meta import Session
 

	
 
fixture = Fixture()
 

	
 
from kallithea.tests import *
 

	
 

	
 
class _BaseTest(TestController):
 
    """
 
    Write all tests here
 
    """
 
    REPO = None
 
    REPO_TYPE = None
 
    NEW_REPO = None
 
    REPO_FORK = None
 

	
 
class _BaseFixture(unittest.TestCase):
 
    @classmethod
 
    def setup_class(cls):
 
        pass
 

	
 
    @classmethod
 
    def teardown_class(cls):
 
        pass
 

	
 
    def setUp(self):
 
        self.username = u'forkuser'
 
        self.password = u'qweqwe'
 
        self.u1 = fixture.create_user(self.username, password=self.password,
 
                                      email=u'fork_king@example.com')
 
        Session().commit()
 

	
 
    def tearDown(self):
 
        Session().delete(self.u1)
 
        Session().commit()
 

	
 

	
 
class _BaseTestCase(object):
 
    """
 
    Write all tests here
 
    """
 
    REPO = None
 
    REPO_TYPE = None
 
    NEW_REPO = None
 
    REPO_FORK = None
 

	
 
    def test_index(self):
 
        self.log_user()
 
        repo_name = self.REPO
 
        response = self.app.get(url(controller='forks', action='forks',
 
                                    repo_name=repo_name))
 

	
 
        response.mustcontain("""There are no forks yet""")
 

	
 
    def test_no_permissions_to_fork(self):
 
        usr = self.log_user(TEST_USER_REGULAR_LOGIN,
 
                            TEST_USER_REGULAR_PASS)['user_id']
 
        user_model = UserModel()
 
        user_model.revoke_perm(usr, 'hg.fork.repository')
 
        user_model.grant_perm(usr, 'hg.fork.none')
 
        u = UserModel().get(usr)
 
        u.inherit_default_permissions = False
 
        Session().commit()
 
        # try create a fork
 
        repo_name = self.REPO
 
        self.app.post(url(controller='forks', action='fork_create',
 
                          repo_name=repo_name), {'_authentication_token': self.authentication_token()}, status=403)
 

	
 
    def test_index_with_fork(self):
 
        self.log_user()
 

	
 
        # create a fork
 
        fork_name = self.REPO_FORK
 
        description = 'fork of vcs test'
 
        repo_name = self.REPO
 
        org_repo = Repository.get_by_repo_name(repo_name)
 
        creation_args = {
 
            'repo_name': fork_name,
 
            'repo_group': '',
 
            'fork_parent_id': org_repo.repo_id,
 
            'repo_type': self.REPO_TYPE,
 
            'description': description,
 
            'private': 'False',
 
            'landing_rev': 'rev:tip',
 
            '_authentication_token': self.authentication_token()}
 

	
 
        self.app.post(url(controller='forks', action='fork_create',
 
                          repo_name=repo_name), creation_args)
 

	
 
        response = self.app.get(url(controller='forks', action='forks',
 
                                    repo_name=repo_name))
 

	
 
        response.mustcontain(
 
            """<a href="/%s">%s</a>""" % (fork_name, fork_name)
 
        )
 

	
 
        # remove this fork
 
        response = self.app.delete(url('repo', repo_name=fork_name))
 

	
 
    def test_fork_create_into_group(self):
 
        self.log_user()
 
        group = fixture.create_repo_group('vc')
 
        group_id = group.group_id
 
        fork_name = self.REPO_FORK
 
        fork_name_full = 'vc/%s' % fork_name
 
        description = 'fork of vcs test'
 
        repo_name = self.REPO
 
        org_repo = Repository.get_by_repo_name(repo_name)
 
        creation_args = {
 
            'repo_name': fork_name,
 
            'repo_group': group_id,
 
            'fork_parent_id': org_repo.repo_id,
 
            'repo_type': self.REPO_TYPE,
 
            'description': description,
 
            'private': 'False',
 
            'landing_rev': 'rev:tip',
 
            '_authentication_token': self.authentication_token()}
 
        self.app.post(url(controller='forks', action='fork_create',
 
                          repo_name=repo_name), creation_args)
 
        repo = Repository.get_by_repo_name(fork_name_full)
 
        assert repo.fork.repo_name == self.REPO
 

	
 
        ## run the check page that triggers the flash message
 
        response = self.app.get(url('repo_check_home', repo_name=fork_name_full))
 
        #test if we have a message that fork is ok
 
        self.checkSessionFlash(response,
 
                'Forked repository %s as <a href="/%s">%s</a>'
 
                % (repo_name, fork_name_full, fork_name_full))
 

	
 
        #test if the fork was created in the database
 
        fork_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == fork_name_full).one()
 

	
 
        self.assertEqual(fork_repo.repo_name, fork_name_full)
 
        self.assertEqual(fork_repo.fork.repo_name, repo_name)
 

	
 
        # test if the repository is visible in the list
 
        response = self.app.get(url('summary_home', repo_name=fork_name_full))
 
        response.mustcontain(fork_name_full)
 
        response.mustcontain(self.REPO_TYPE)
 
        response.mustcontain('Fork of "<a href="/%s">%s</a>"' % (repo_name, repo_name))
 

	
 
        fixture.destroy_repo(fork_name_full)
 
        fixture.destroy_repo_group(group_id)
 

	
 
    def test_z_fork_create(self):
 
        self.log_user()
 
        fork_name = self.REPO_FORK
 
        description = 'fork of vcs test'
 
        repo_name = self.REPO
 
        org_repo = Repository.get_by_repo_name(repo_name)
 
        creation_args = {
 
            'repo_name': fork_name,
 
            'repo_group': '',
 
            'fork_parent_id': org_repo.repo_id,
 
            'repo_type': self.REPO_TYPE,
 
            'description': description,
 
            'private': 'False',
 
            'landing_rev': 'rev:tip',
 
            '_authentication_token': self.authentication_token()}
 
        self.app.post(url(controller='forks', action='fork_create',
 
                          repo_name=repo_name), creation_args)
 
        repo = Repository.get_by_repo_name(self.REPO_FORK)
 
        assert repo.fork.repo_name == self.REPO
 

	
 
        ## run the check page that triggers the flash message
 
        response = self.app.get(url('repo_check_home', repo_name=fork_name))
 
        #test if we have a message that fork is ok
 
        self.checkSessionFlash(response,
 
                'Forked repository %s as <a href="/%s">%s</a>'
 
                % (repo_name, fork_name, fork_name))
 

	
 
        #test if the fork was created in the database
 
        fork_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == fork_name).one()
 

	
 
        self.assertEqual(fork_repo.repo_name, fork_name)
 
        self.assertEqual(fork_repo.fork.repo_name, repo_name)
 

	
 
        # test if the repository is visible in the list
 
        response = self.app.get(url('summary_home', repo_name=fork_name))
 
        response.mustcontain(fork_name)
 
        response.mustcontain(self.REPO_TYPE)
 
        response.mustcontain('Fork of "<a href="/%s">%s</a>"' % (repo_name, repo_name))
 

	
 
    def test_zz_fork_permission_page(self):
 
        usr = self.log_user(self.username, self.password)['user_id']
 
        repo_name = self.REPO
 

	
 
        forks = Repository.query()\
 
            .filter(Repository.repo_type == self.REPO_TYPE)\
 
            .filter(Repository.fork_id != None).all()
 
        self.assertEqual(1, len(forks))
 

	
 
        # set read permissions for this
 
        RepoModel().grant_user_permission(repo=forks[0],
 
                                          user=usr,
 
                                          perm='repository.read')
 
        Session().commit()
 

	
 
        response = self.app.get(url(controller='forks', action='forks',
 
                                    repo_name=repo_name))
 

	
 
        response.mustcontain('<div style="padding:5px 3px 3px 42px;">fork of vcs test</div>')
 

	
 
    def test_zzz_fork_permission_page(self):
 
        usr = self.log_user(self.username, self.password)['user_id']
 
        repo_name = self.REPO
 

	
 
        forks = Repository.query()\
 
            .filter(Repository.repo_type == self.REPO_TYPE)\
 
            .filter(Repository.fork_id != None).all()
 
        self.assertEqual(1, len(forks))
 

	
 
        # set none
 
        RepoModel().grant_user_permission(repo=forks[0],
 
                                          user=usr, perm='repository.none')
 
        Session().commit()
 
        # fork shouldn't be there
 
        response = self.app.get(url(controller='forks', action='forks',
 
                                    repo_name=repo_name))
 
        response.mustcontain('There are no forks yet')
 

	
 

	
 
class TestGIT(_BaseTest):
 
class TestGIT(TestController, _BaseTestCase, _BaseFixture):
 
    REPO = GIT_REPO
 
    NEW_REPO = NEW_GIT_REPO
 
    REPO_TYPE = 'git'
 
    REPO_FORK = GIT_FORK
 

	
 

	
 
class TestHG(_BaseTest):
 
class TestHG(TestController, _BaseTestCase, _BaseFixture):
 
    REPO = HG_REPO
 
    NEW_REPO = NEW_HG_REPO
 
    REPO_TYPE = 'hg'
 
    REPO_FORK = HG_FORK
kallithea/tests/vcs/base.py
 
"""
 
Module providing a backend-independent mixin class. It requires that the
 
InMemoryChangeset class works properly for the backend class.
 
"""
 
import os
 
import time
 
import shutil
 
import datetime
 
from kallithea.tests.vcs.conf import SCM_TESTS, get_new_dir
 

	
 
from kallithea.lib import vcs
 
from kallithea.lib.vcs.utils.compat import unittest
 
from kallithea.lib.vcs.nodes import FileNode
 

	
 

	
 
class BackendTestMixin(object):
 
class _BackendTestMixin(object):
 
    """
 
    This is a backend-independent test case class which should be created
 
    with the ``type`` method.
 

	
 
    The following attributes must be set on a subclass:
 

	
 
    - ``backend_alias``: alias of used backend (see ``vcs.BACKENDS``)
 
    - ``repo_path``: path to the repository that will be created for the set
 
      of tests
 
    - ``recreate_repo_per_test``: if set to ``False``, the repository will NOT
 
      be recreated before every single test. Defaults to ``True``.
 
    """
 
    recreate_repo_per_test = True
 

	
 
    @classmethod
 
    def get_backend(cls):
 
        return vcs.get_backend(cls.backend_alias)
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        commits = [
 
            {
 
                'message': u'Initial commit',
 
                'author': u'Joe Doe <joe.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 20),
 
                'added': [
 
                    FileNode('foobar', content='Foobar'),
 
                    FileNode('foobar2', content='Foobar II'),
 
                    FileNode('foo/bar/baz', content='baz here!'),
 
                ],
 
            },
 
            {
 
                'message': u'Changes...',
 
                'author': u'Jane Doe <jane.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 21),
 
                'added': [
 
                    FileNode('some/new.txt', content='news...'),
 
                ],
 
                'changed': [
 
                    FileNode('foobar', 'Foobar I'),
 
                ],
 
                'removed': [],
 
            },
 
        ]
 
        return commits
 

	
 
    @classmethod
 
    def setUpClass(cls):
 
        Backend = cls.get_backend()
 
        cls.backend_class = Backend
 
        cls.repo_path = get_new_dir(str(time.time()))
 
        cls.repo = Backend(cls.repo_path, create=True)
 
        cls.imc = cls.repo.in_memory_changeset
 
        cls.default_branch = cls.repo.DEFAULT_BRANCH_NAME
 

	
 
        for commit in cls._get_commits():
 
            for node in commit.get('added', []):
 
                cls.imc.add(FileNode(node.path, content=node.content))
 
            for node in commit.get('changed', []):
 
                cls.imc.change(FileNode(node.path, content=node.content))
 
            for node in commit.get('removed', []):
 
                cls.imc.remove(FileNode(node.path))
 

	
 
            cls.tip = cls.imc.commit(message=unicode(commit['message']),
 
                                     author=unicode(commit['author']),
 
                                     date=commit['date'])
 

	
 
    @classmethod
 
    def tearDownClass(cls):
 
        if not getattr(cls, 'recreate_repo_per_test', False) and \
 
            'VCS_REMOVE_TEST_DIRS' in os.environ:
 
            shutil.rmtree(cls.repo_path)
 

	
 
    def setUp(self):
 
        if getattr(self, 'recreate_repo_per_test', False):
 
            self.__class__.setUpClass()
 

	
 
    def tearDown(self):
 
        if getattr(self, 'recreate_repo_per_test', False) and \
 
            'VCS_REMOVE_TEST_DIRS' in os.environ:
 
            shutil.rmtree(self.repo_path)
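    # Note (illustration, not part of this changeset): per the setUp/tearDown
    # logic above, the temporary test repositories are only removed when the
    # VCS_REMOVE_TEST_DIRS environment variable is set, e.g. (command line is a
    # suggestion, adjust the test path as needed):
    #
    #     VCS_REMOVE_TEST_DIRS=1 nosetests kallithea/tests/vcs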
 

	
 

	
 
# For each backend create test case class
 
for alias in SCM_TESTS:
 
    attrs = {
 
        'backend_alias': alias,
 
    }
 
    cls_name = ''.join(('%s base backend test' % alias).title().split())
 
    bases = (BackendTestMixin, unittest.TestCase)
 
    bases = (_BackendTestMixin, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
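# Illustration (not part of this changeset): for alias 'git', cls_name above
# evaluates to 'GitBaseBackendTest', so the generated class is roughly
# equivalent to writing by hand:
#
#     class GitBaseBackendTest(_BackendTestMixin, unittest.TestCase):
#         backend_alias = 'git'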
 

	
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_archives.py
 
from __future__ import with_statement
 

	
 
import os
 
import tarfile
 
import zipfile
 
import datetime
 
import tempfile
 
import StringIO
 
from kallithea.tests.vcs.base import BackendTestMixin
 
from kallithea.tests.vcs.base import _BackendTestMixin
 
from kallithea.tests.vcs.conf import SCM_TESTS
 
from kallithea.lib.vcs.exceptions import VCSError
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.lib.vcs.utils.compat import unittest
 

	
 

	
 
class ArchivesTestCaseMixin(BackendTestMixin):
 
class ArchivesTestCaseMixin(_BackendTestMixin):
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        start_date = datetime.datetime(2010, 1, 1, 20)
 
        for x in xrange(5):
 
            yield {
 
                'message': 'Commit %d' % x,
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': start_date + datetime.timedelta(hours=12 * x),
 
                'added': [
 
                    FileNode('%d/file_%d.txt' % (x, x),
 
                        content='Foobar %d' % x),
 
                ],
 
            }
 

	
 
    def test_archive_zip(self):
 
        path = tempfile.mkstemp()[1]
 
        with open(path, 'wb') as f:
 
            self.tip.fill_archive(stream=f, kind='zip', prefix='repo')
 
        out = zipfile.ZipFile(path)
 

	
 
        for x in xrange(5):
 
            node_path = '%d/file_%d.txt' % (x, x)
 
            decompressed = StringIO.StringIO()
 
            decompressed.write(out.read('repo/' + node_path))
 
            self.assertEqual(
 
                decompressed.getvalue(),
 
                self.tip.get_node(node_path).content)
 

	
 
    def test_archive_tgz(self):
 
        path = tempfile.mkstemp()[1]
 
        with open(path, 'wb') as f:
 
            self.tip.fill_archive(stream=f, kind='tgz', prefix='repo')
 
        outdir = tempfile.mkdtemp()
 

	
 
        outfile = tarfile.open(path, 'r|gz')
 
        outfile.extractall(outdir)
 

	
 
        for x in xrange(5):
 
            node_path = '%d/file_%d.txt' % (x, x)
 
            self.assertEqual(
 
                open(os.path.join(outdir, 'repo/' + node_path)).read(),
 
                self.tip.get_node(node_path).content)
 

	
 
    def test_archive_tbz2(self):
 
        path = tempfile.mkstemp()[1]
 
        with open(path, 'w+b') as f:
 
            self.tip.fill_archive(stream=f, kind='tbz2', prefix='repo')
 
        outdir = tempfile.mkdtemp()
 

	
 
        outfile = tarfile.open(path, 'r|bz2')
 
        outfile.extractall(outdir)
 

	
 
        for x in xrange(5):
 
            node_path = '%d/file_%d.txt' % (x, x)
 
            self.assertEqual(
 
                open(os.path.join(outdir, 'repo/' + node_path)).read(),
 
                self.tip.get_node(node_path).content)
 

	
 
    def test_archive_default_stream(self):
 
        tmppath = tempfile.mkstemp()[1]
 
        with open(tmppath, 'w') as stream:
 
            self.tip.fill_archive(stream=stream)
 
        mystream = StringIO.StringIO()
 
        self.tip.fill_archive(stream=mystream)
 
        mystream.seek(0)
 
        with open(tmppath, 'r') as f:
 
            self.assertEqual(f.read(), mystream.read())
 

	
 
    def test_archive_wrong_kind(self):
 
        with self.assertRaises(VCSError):
 
            self.tip.fill_archive(kind='wrong kind')
 

	
 
    def test_archive_empty_prefix(self):
 
        with self.assertRaises(VCSError):
 
            self.tip.fill_archive(prefix='')
 

	
 
    def test_archive_prefix_with_leading_slash(self):
 
        with self.assertRaises(VCSError):
 
            self.tip.fill_archive(prefix='/any')
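    # Usage sketch (illustration only, not part of this changeset; the output
    # path is hypothetical): the fill_archive API exercised above is typically
    # driven as in test_archive_tgz:
    #
    #     with open('/tmp/repo-tip.tar.gz', 'wb') as f:
    #         tip.fill_archive(stream=f, kind='tgz', prefix='repo')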
 

	
 
# For each backend create test case class
 
for alias in SCM_TESTS:
 
    attrs = {
 
        'backend_alias': alias,
 
    }
 
    cls_name = ''.join(('%s archive test' % alias).title().split())
 
    bases = (ArchivesTestCaseMixin, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_branches.py
 
from __future__ import with_statement
 

	
 
import datetime
 
from kallithea.lib import vcs
 
from kallithea.lib.vcs.utils.compat import unittest
 
from kallithea.lib.vcs.nodes import FileNode
 

	
 
from kallithea.tests.vcs.base import BackendTestMixin
 
from kallithea.tests.vcs.base import _BackendTestMixin
 
from kallithea.tests.vcs.conf import SCM_TESTS
 

	
 

	
 
class BranchesTestCaseMixin(BackendTestMixin):
 
class BranchesTestCaseMixin(_BackendTestMixin):
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        commits = [
 
            {
 
                'message': 'Initial commit',
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 20),
 
                'added': [
 
                    FileNode('foobar', content='Foobar'),
 
                    FileNode('foobar2', content='Foobar II'),
 
                    FileNode('foo/bar/baz', content='baz here!'),
 
                ],
 
            },
 
            {
 
                'message': 'Changes...',
 
                'author': 'Jane Doe <jane.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 21),
 
                'added': [
 
                    FileNode('some/new.txt', content='news...'),
 
                ],
 
                'changed': [
 
                    FileNode('foobar', 'Foobar I'),
 
                ],
 
                'removed': [],
 
            },
 
        ]
 
        return commits
 

	
 
    def test_simple(self):
 
        tip = self.repo.get_changeset()
 
        self.assertEqual(tip.date, datetime.datetime(2010, 1, 1, 21))
 

	
 
    def test_new_branch(self):
 
        # This check must not be removed to ensure the 'branches' LazyProperty
 
        # gets hit *before* the new 'foobar' branch got created:
 
        self.assertFalse('foobar' in self.repo.branches)
 
        self.imc.add(vcs.nodes.FileNode('docs/index.txt',
 
            content='Documentation\n'))
 
        foobar_tip = self.imc.commit(
 
            message=u'New branch: foobar',
 
            author=u'joe',
 
            branch='foobar',
 
        )
 
        self.assertTrue('foobar' in self.repo.branches)
 
        self.assertEqual(foobar_tip.branch, 'foobar')
 

	
 
    def test_new_head(self):
 
        tip = self.repo.get_changeset()
 
        self.imc.add(vcs.nodes.FileNode('docs/index.txt',
 
            content='Documentation\n'))
 
        foobar_tip = self.imc.commit(
 
            message=u'New branch: foobar',
 
            author=u'joe',
 
            branch='foobar',
 
            parents=[tip],
 
        )
 
        self.imc.change(vcs.nodes.FileNode('docs/index.txt',
 
            content='Documentation\nand more...\n'))
 
        newtip = self.imc.commit(
 
            message=u'At default branch',
 
            author=u'joe',
 
            branch=foobar_tip.branch,
 
            parents=[foobar_tip],
 
        )
 

	
 
        newest_tip = self.imc.commit(
 
            message=u'Merged with %s' % foobar_tip.raw_id,
 
            author=u'joe',
 
            branch=self.backend_class.DEFAULT_BRANCH_NAME,
 
            parents=[newtip, foobar_tip],
 
        )
 

	
 
        self.assertEqual(newest_tip.branch,
 
            self.backend_class.DEFAULT_BRANCH_NAME)
 

	
 
    def test_branch_with_slash_in_name(self):
 
        self.imc.add(vcs.nodes.FileNode('extrafile', content='Some data\n'))
 
        self.imc.commit(u'Branch with a slash!', author=u'joe',
 
            branch='issue/123')
 
        self.assertTrue('issue/123' in self.repo.branches)
 

	
 
    def test_branch_with_slash_in_name_and_similar_without(self):
 
        self.imc.add(vcs.nodes.FileNode('extrafile', content='Some data\n'))
 
        self.imc.commit(u'Branch with a slash!', author=u'joe',
 
            branch='issue/123')
 
        self.imc.add(vcs.nodes.FileNode('extrafile II', content='Some data\n'))
 
        self.imc.commit(u'Branch without a slash...', author=u'joe',
 
            branch='123')
 
        self.assertIn('issue/123', self.repo.branches)
 
        self.assertIn('123', self.repo.branches)
 

	
 

	
 
# For each backend create test case class
 
for alias in SCM_TESTS:
 
    attrs = {
 
        'backend_alias': alias,
 
    }
 
    cls_name = ''.join(('%s branches test' % alias).title().split())
 
    bases = (BranchesTestCaseMixin, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
 

	
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_changesets.py
 
# encoding: utf8
 
from __future__ import with_statement
 

	
 
import time
 
import datetime
 
from kallithea.lib import vcs
 
from kallithea.tests.vcs.base import BackendTestMixin
 
from kallithea.tests.vcs.base import _BackendTestMixin
 
from kallithea.tests.vcs.conf import SCM_TESTS
 

	
 
from kallithea.lib.vcs.backends.base import BaseChangeset
 
from kallithea.lib.vcs.nodes import (
 
    FileNode, AddedFileNodesGenerator,
 
    ChangedFileNodesGenerator, RemovedFileNodesGenerator
 
)
 
from kallithea.lib.vcs.exceptions import (
 
    BranchDoesNotExistError, ChangesetDoesNotExistError,
 
    RepositoryError, EmptyRepositoryError
 
)
 
from kallithea.lib.vcs.utils.compat import unittest
 
from kallithea.tests.vcs.conf import get_new_dir
 

	
 

	
 
class TestBaseChangeset(unittest.TestCase):
 

	
 
    def test_as_dict(self):
 
        changeset = BaseChangeset()
 
        changeset.id = 'ID'
 
        changeset.raw_id = 'RAW_ID'
 
        changeset.short_id = 'SHORT_ID'
 
        changeset.revision = 1009
 
        changeset.date = datetime.datetime(2011, 1, 30, 1, 45)
 
        changeset.message = 'Message of a commit'
 
        changeset.author = 'Joe Doe <joe.doe@example.com>'
 
        changeset.added = [FileNode('foo/bar/baz'), FileNode('foobar')]
 
        changeset.changed = []
 
        changeset.removed = []
 
        self.assertEqual(changeset.as_dict(), {
 
            'id': 'ID',
 
            'raw_id': 'RAW_ID',
 
            'short_id': 'SHORT_ID',
 
            'revision': 1009,
 
            'date': datetime.datetime(2011, 1, 30, 1, 45),
 
            'message': 'Message of a commit',
 
            'author': {
 
                'name': 'Joe Doe',
 
                'email': 'joe.doe@example.com',
 
            },
 
            'added': ['foo/bar/baz', 'foobar'],
 
            'changed': [],
 
            'removed': [],
 
        })
 

	
 
class ChangesetsWithCommitsTestCaseMixin(BackendTestMixin):
 
class _ChangesetsWithCommitsTestCaseMixin(_BackendTestMixin):
 
    recreate_repo_per_test = True
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        start_date = datetime.datetime(2010, 1, 1, 20)
 
        for x in xrange(5):
 
            yield {
 
                'message': 'Commit %d' % x,
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': start_date + datetime.timedelta(hours=12 * x),
 
                'added': [
 
                    FileNode('file_%d.txt' % x, content='Foobar %d' % x),
 
                ],
 
            }
 

	
 
    def test_new_branch(self):
 
        self.imc.add(vcs.nodes.FileNode('docs/index.txt',
 
            content='Documentation\n'))
 
        foobar_tip = self.imc.commit(
 
            message=u'New branch: foobar',
 
            author=u'joe',
 
            branch='foobar',
 
        )
 
        self.assertTrue('foobar' in self.repo.branches)
 
        self.assertEqual(foobar_tip.branch, 'foobar')
 
        # 'foobar' should be the only branch that contains the new commit
 
        self.assertNotEqual(*self.repo.branches.values())
 

	
 
    def test_new_head_in_default_branch(self):
 
        tip = self.repo.get_changeset()
 
        self.imc.add(vcs.nodes.FileNode('docs/index.txt',
 
            content='Documentation\n'))
 
        foobar_tip = self.imc.commit(
 
            message=u'New branch: foobar',
 
            author=u'joe',
 
            branch='foobar',
 
            parents=[tip],
 
        )
 
        self.imc.change(vcs.nodes.FileNode('docs/index.txt',
 
            content='Documentation\nand more...\n'))
 
        newtip = self.imc.commit(
 
            message=u'At default branch',
 
            author=u'joe',
 
            branch=foobar_tip.branch,
 
            parents=[foobar_tip],
 
        )
 

	
 
        newest_tip = self.imc.commit(
 
            message=u'Merged with %s' % foobar_tip.raw_id,
 
            author=u'joe',
 
            branch=self.backend_class.DEFAULT_BRANCH_NAME,
 
            parents=[newtip, foobar_tip],
 
        )
 

	
 
        self.assertEqual(newest_tip.branch,
 
            self.backend_class.DEFAULT_BRANCH_NAME)
 

	
 
    def test_get_changesets_respects_branch_name(self):
 
        tip = self.repo.get_changeset()
 
        self.imc.add(vcs.nodes.FileNode('docs/index.txt',
 
            content='Documentation\n'))
 
        doc_changeset = self.imc.commit(
 
            message=u'New branch: docs',
 
            author=u'joe',
 
            branch='docs',
 
        )
 
        self.imc.add(vcs.nodes.FileNode('newfile', content=''))
 
        self.imc.commit(
 
            message=u'Back in default branch',
 
            author=u'joe',
 
            parents=[tip],
 
        )
 
        default_branch_changesets = self.repo.get_changesets(
 
            branch_name=self.repo.DEFAULT_BRANCH_NAME)
 
        self.assertNotIn(doc_changeset, default_branch_changesets)
 

	
 
    def test_get_changeset_by_branch(self):
 
        for branch, sha in self.repo.branches.iteritems():
 
            self.assertEqual(sha, self.repo.get_changeset(branch).raw_id)
 

	
 
    def test_get_changeset_by_tag(self):
 
        for tag, sha in self.repo.tags.iteritems():
 
            self.assertEqual(sha, self.repo.get_changeset(tag).raw_id)
 

	
 
    def test_get_changeset_parents(self):
 
        for test_rev in [1, 2, 3]:
 
            sha = self.repo.get_changeset(test_rev-1)
 
            self.assertEqual([sha], self.repo.get_changeset(test_rev).parents)
 

	
 
    def test_get_changeset_children(self):
 
        for test_rev in [1, 2, 3]:
 
            sha = self.repo.get_changeset(test_rev+1)
 
            self.assertEqual([sha], self.repo.get_changeset(test_rev).children)
 

	
 

	
 
class ChangesetsTestCaseMixin(BackendTestMixin):
 
class _ChangesetsTestCaseMixin(_BackendTestMixin):
 
    recreate_repo_per_test = False
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        start_date = datetime.datetime(2010, 1, 1, 20)
 
        for x in xrange(5):
 
            yield {
 
                'message': u'Commit %d' % x,
 
                'author': u'Joe Doe <joe.doe@example.com>',
 
                'date': start_date + datetime.timedelta(hours=12 * x),
 
                'added': [
 
                    FileNode('file_%d.txt' % x, content='Foobar %d' % x),
 
                ],
 
            }
 

	
 
    def test_simple(self):
 
        tip = self.repo.get_changeset()
 
        self.assertEqual(tip.date, datetime.datetime(2010, 1, 3, 20))
 

	
 
    def test_get_changesets_is_ordered_by_date(self):
 
        changesets = list(self.repo.get_changesets())
 
        ordered_by_date = sorted(changesets,
 
            key=lambda cs: cs.date)
 
        self.assertItemsEqual(changesets, ordered_by_date)
 

	
 
    def test_get_changesets_respects_start(self):
 
        second_id = self.repo.revisions[1]
 
        changesets = list(self.repo.get_changesets(start=second_id))
 
        self.assertEqual(len(changesets), 4)
 

	
 
    def test_get_changesets_numerical_id_respects_start(self):
 
        second_id = 1
 
        changesets = list(self.repo.get_changesets(start=second_id))
 
        self.assertEqual(len(changesets), 4)
 

	
 
    def test_get_changesets_includes_start_changeset(self):
 
        second_id = self.repo.revisions[1]
 
        changesets = list(self.repo.get_changesets(start=second_id))
 
        self.assertEqual(changesets[0].raw_id, second_id)
 

	
 
    def test_get_changesets_respects_end(self):
 
        second_id = self.repo.revisions[1]
 
        changesets = list(self.repo.get_changesets(end=second_id))
 
        self.assertEqual(changesets[-1].raw_id, second_id)
 
        self.assertEqual(len(changesets), 2)
 

	
 
    def test_get_changesets_numerical_id_respects_end(self):
 
        second_id = 1
 
        changesets = list(self.repo.get_changesets(end=second_id))
 
        self.assertEqual(changesets.index(changesets[-1]), second_id)
 
        self.assertEqual(len(changesets), 2)
 

	
 
    def test_get_changesets_respects_both_start_and_end(self):
 
        second_id = self.repo.revisions[1]
 
        third_id = self.repo.revisions[2]
 
        changesets = list(self.repo.get_changesets(start=second_id,
 
            end=third_id))
 
        self.assertEqual(len(changesets), 2)
 

	
 
    def test_get_changesets_numerical_id_respects_both_start_and_end(self):
 
        changesets = list(self.repo.get_changesets(start=2, end=3))
 
        self.assertEqual(len(changesets), 2)
 

	
 
    def test_get_changesets_on_empty_repo_raises_EmptyRepository_error(self):
 
        Backend = self.get_backend()
 
        repo_path = get_new_dir(str(time.time()))
 
        repo = Backend(repo_path, create=True)
 

	
 
        with self.assertRaises(EmptyRepositoryError):
 
            list(repo.get_changesets(start='foobar'))
 

	
 
    def test_get_changesets_includes_end_changeset(self):
 
        second_id = self.repo.revisions[1]
 
        changesets = list(self.repo.get_changesets(end=second_id))
 
        self.assertEqual(changesets[-1].raw_id, second_id)
 

	
 
    def test_get_changesets_respects_start_date(self):
 
        start_date = datetime.datetime(2010, 2, 1)
 
        for cs in self.repo.get_changesets(start_date=start_date):
 
            self.assertGreaterEqual(cs.date, start_date)
 

	
 
    def test_get_changesets_respects_end_date(self):
 
        start_date = datetime.datetime(2010, 1, 1)
 
        end_date = datetime.datetime(2010, 2, 1)
 
        for cs in self.repo.get_changesets(start_date=start_date,
 
                                           end_date=end_date):
 
            self.assertGreaterEqual(cs.date, start_date)
 
            self.assertLessEqual(cs.date, end_date)
 

	
 
    def test_get_changesets_respects_start_date_and_end_date(self):
 
        end_date = datetime.datetime(2010, 2, 1)
 
        for cs in self.repo.get_changesets(end_date=end_date):
 
            self.assertLessEqual(cs.date, end_date)
 

	
 
    def test_get_changesets_respects_reverse(self):
 
        changesets_id_list = [cs.raw_id for cs in
 
            self.repo.get_changesets(reverse=True)]
 
        self.assertItemsEqual(changesets_id_list, reversed(self.repo.revisions))
 

	
 
    def test_get_filenodes_generator(self):
 
        tip = self.repo.get_changeset()
 
        filepaths = [node.path for node in tip.get_filenodes_generator()]
 
        self.assertItemsEqual(filepaths, ['file_%d.txt' % x for x in xrange(5)])
 

	
 
    def test_size(self):
 
        tip = self.repo.get_changeset()
 
        size = 5 * len('Foobar N') # Size of 5 files
 
        self.assertEqual(tip.size, size)
 

	
 
    def test_author(self):
 
        tip = self.repo.get_changeset()
 
        self.assertEqual(tip.author, u'Joe Doe <joe.doe@example.com>')
 

	
 
    def test_author_name(self):
 
        tip = self.repo.get_changeset()
 
        self.assertEqual(tip.author_name, u'Joe Doe')
 

	
 
    def test_author_email(self):
 
        tip = self.repo.get_changeset()
 
        self.assertEqual(tip.author_email, u'joe.doe@example.com')
 

	
 
    def test_get_changesets_raise_changesetdoesnotexist_for_wrong_start(self):
 
        with self.assertRaises(ChangesetDoesNotExistError):
 
            list(self.repo.get_changesets(start='foobar'))
 

	
 
    def test_get_changesets_raise_changesetdoesnotexist_for_wrong_end(self):
 
        with self.assertRaises(ChangesetDoesNotExistError):
 
            list(self.repo.get_changesets(end='foobar'))
 

	
 
    def test_get_changesets_raise_branchdoesnotexist_for_wrong_branch_name(self):
 
        with self.assertRaises(BranchDoesNotExistError):
 
            list(self.repo.get_changesets(branch_name='foobar'))
 

	
 
    def test_get_changesets_raise_repositoryerror_for_wrong_start_end(self):
 
        start = self.repo.revisions[-1]
 
        end = self.repo.revisions[0]
 
        with self.assertRaises(RepositoryError):
 
            list(self.repo.get_changesets(start=start, end=end))
 

	
 
    def test_get_changesets_numerical_id_reversed(self):
 
        with self.assertRaises(RepositoryError):
 
            [x for x in self.repo.get_changesets(start=3, end=2)]
 

	
 
    def test_get_changesets_numerical_id_respects_both_start_and_end_last(self):
 
        with self.assertRaises(RepositoryError):
 
            last = len(self.repo.revisions)
 
            list(self.repo.get_changesets(start=last-1, end=last-2))
 

	
 
    def test_get_changesets_numerical_id_last_zero_error(self):
 
        with self.assertRaises(RepositoryError):
 
            last = len(self.repo.revisions)
 
            list(self.repo.get_changesets(start=last-1, end=0))
 

	
 

	
 
class ChangesetsChangesTestCaseMixin(BackendTestMixin):
 
class _ChangesetsChangesTestCaseMixin(_BackendTestMixin):
 
    recreate_repo_per_test = False
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        return [
 
            {
 
                'message': u'Initial',
 
                'author': u'Joe Doe <joe.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 20),
 
                'added': [
 
                    FileNode('foo/bar', content='foo'),
 
                    FileNode('foo/bał', content='foo'),
 
                    FileNode('foobar', content='foo'),
 
                    FileNode('qwe', content='foo'),
 
                ],
 
            },
 
            {
 
                'message': u'Massive changes',
 
                'author': u'Joe Doe <joe.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 22),
 
                'added': [FileNode('fallout', content='War never changes')],
 
                'changed': [
 
                    FileNode('foo/bar', content='baz'),
 
                    FileNode('foobar', content='baz'),
 
                ],
 
                'removed': [FileNode('qwe')],
 
            },
 
        ]
 

	
 
    def test_initial_commit(self):
 
        changeset = self.repo.get_changeset(0)
 
        self.assertItemsEqual(changeset.added, [
 
            changeset.get_node('foo/bar'),
 
            changeset.get_node('foo/bał'),
 
            changeset.get_node('foobar'),
 
            changeset.get_node('qwe'),
 
        ])
 
        self.assertItemsEqual(changeset.changed, [])
 
        self.assertItemsEqual(changeset.removed, [])
 

	
 
    def test_head_added(self):
 
        changeset = self.repo.get_changeset()
 
        self.assertTrue(isinstance(changeset.added, AddedFileNodesGenerator))
 
        self.assertItemsEqual(changeset.added, [
 
            changeset.get_node('fallout'),
 
        ])
 
        self.assertTrue(isinstance(changeset.changed, ChangedFileNodesGenerator))
 
        self.assertItemsEqual(changeset.changed, [
 
            changeset.get_node('foo/bar'),
 
            changeset.get_node('foobar'),
 
        ])
 
        self.assertTrue(isinstance(changeset.removed, RemovedFileNodesGenerator))
 
        self.assertEqual(len(changeset.removed), 1)
 
        self.assertEqual(list(changeset.removed)[0].path, 'qwe')
 

	
 
    def test_get_filemode(self):
 
        changeset = self.repo.get_changeset()
 
        self.assertEqual(33188, changeset.get_file_mode('foo/bar'))
 

	
 
    def test_get_filemode_non_ascii(self):
 
        changeset = self.repo.get_changeset()
 
        self.assertEqual(33188, changeset.get_file_mode('foo/bał'))
 
        self.assertEqual(33188, changeset.get_file_mode(u'foo/bał'))
 

	
 
# For each backend create test case class
 
for alias in SCM_TESTS:
 
    attrs = {
 
        'backend_alias': alias,
 
    }
 
    # tests with additional commits
 
    cls_name = ''.join(('%s changesets with commits test' % alias).title().split())
 
    bases = (ChangesetsWithCommitsTestCaseMixin, unittest.TestCase)
 
    bases = (_ChangesetsWithCommitsTestCaseMixin, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
 

	
 
    # tests without additional commits
 
    cls_name = ''.join(('%s changesets test' % alias).title().split())
 
    bases = (ChangesetsTestCaseMixin, unittest.TestCase)
 
    bases = (_ChangesetsTestCaseMixin, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
 

	
 
    # tests changes
 
    cls_name = ''.join(('%s changesets changes test' % alias).title().split())
 
    bases = (ChangesetsChangesTestCaseMixin, unittest.TestCase)
 
    bases = (_ChangesetsChangesTestCaseMixin, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
 

	
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_getitem.py
 
from __future__ import with_statement
 

	
 
import datetime
 
from kallithea.tests.vcs.base import BackendTestMixin
 
from kallithea.tests.vcs.base import _BackendTestMixin
 
from kallithea.tests.vcs.conf import SCM_TESTS
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.lib.vcs.utils.compat import unittest
 

	
 

	
 
class GetitemTestCaseMixin(BackendTestMixin):
 
class GetitemTestCaseMixin(_BackendTestMixin):
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        start_date = datetime.datetime(2010, 1, 1, 20)
 
        for x in xrange(5):
 
            yield {
 
                'message': 'Commit %d' % x,
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': start_date + datetime.timedelta(hours=12 * x),
 
                'added': [
 
                    FileNode('file_%d.txt' % x, content='Foobar %d' % x),
 
                ],
 
            }
 

	
 
    def test__getitem__last_item_is_tip(self):
 
        self.assertEqual(self.repo[-1], self.repo.get_changeset())
 

	
 
    def test__getitem__returns_correct_items(self):
 
        changesets = [self.repo[x] for x in xrange(len(self.repo.revisions))]
 
        self.assertEqual(changesets, list(self.repo.get_changesets()))
 

	
 

	
 
# For each backend create test case class
 
for alias in SCM_TESTS:
 
    attrs = {
 
        'backend_alias': alias,
 
    }
 
    cls_name = ''.join(('%s getitem test' % alias).title().split())
 
    bases = (GetitemTestCaseMixin, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
 

	
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_getslice.py
 
from __future__ import with_statement
 

	
 
import datetime
 
from kallithea.tests.vcs.base import BackendTestMixin
 
from kallithea.tests.vcs.base import _BackendTestMixin
 
from kallithea.tests.vcs.conf import SCM_TESTS
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.lib.vcs.utils.compat import unittest
 

	
 

	
 
class GetsliceTestCaseMixin(BackendTestMixin):
 
class GetsliceTestCaseMixin(_BackendTestMixin):
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        start_date = datetime.datetime(2010, 1, 1, 20)
 
        for x in xrange(5):
 
            yield {
 
                'message': 'Commit %d' % x,
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': start_date + datetime.timedelta(hours=12 * x),
 
                'added': [
 
                    FileNode('file_%d.txt' % x, content='Foobar %d' % x),
 
                ],
 
            }
 

	
 
    def test__getslice__last_item_is_tip(self):
 
        self.assertEqual(list(self.repo[-1:])[0], self.repo.get_changeset())
 

	
 
    def test__getslice__respects_start_index(self):
 
        self.assertEqual(list(self.repo[2:]),
 
            [self.repo.get_changeset(rev) for rev in self.repo.revisions[2:]])
 

	
 
    def test__getslice__respects_negative_start_index(self):
 
        self.assertEqual(list(self.repo[-2:]),
 
            [self.repo.get_changeset(rev) for rev in self.repo.revisions[-2:]])
 

	
 
    def test__getslice__respects_end_index(self):
 
        self.assertEqual(list(self.repo[:2]),
 
            [self.repo.get_changeset(rev) for rev in self.repo.revisions[:2]])
 

	
 
    def test__getslice__respects_negative_end_index(self):
 
        self.assertEqual(list(self.repo[:-2]),
 
            [self.repo.get_changeset(rev) for rev in self.repo.revisions[:-2]])
 

	
 

	
 
# For each backend create test case class
 
for alias in SCM_TESTS:
 
    attrs = {
 
        'backend_alias': alias,
 
    }
 
    cls_name = ''.join(('%s getslice test' % alias).title().split())
 
    bases = (GetsliceTestCaseMixin, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
 

	
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_git.py
 
from __future__ import with_statement
 

	
 
import os
 
import mock
 
import datetime
 
import urllib2
 
from kallithea.lib.vcs.backends.git import GitRepository, GitChangeset
 
from kallithea.lib.vcs.exceptions import RepositoryError, VCSError, NodeDoesNotExistError
 
from kallithea.lib.vcs.nodes import NodeKind, FileNode, DirNode, NodeState
 
from kallithea.lib.vcs.utils.compat import unittest
 
from kallithea.tests.vcs.base import BackendTestMixin
 
from kallithea.tests.vcs.base import _BackendTestMixin
 
from kallithea.tests.vcs.conf import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
 

	
 

	
 
class GitRepositoryTest(unittest.TestCase):
 

	
 
    def __check_for_existing_repo(self):
 
        if os.path.exists(TEST_GIT_REPO_CLONE):
 
            self.fail('Cannot test git clone repo as location %s already '
 
                      'exists. You should manually remove it first.'
 
                      % TEST_GIT_REPO_CLONE)
 

	
 
    def setUp(self):
 
        self.repo = GitRepository(TEST_GIT_REPO)
 

	
 
    def test_wrong_repo_path(self):
 
        wrong_repo_path = '/tmp/errorrepo'
 
        self.assertRaises(RepositoryError, GitRepository, wrong_repo_path)
 

	
 
    def test_git_cmd_injection(self):
 
        repo_inject_path = TEST_GIT_REPO + '; echo "Cake";'
 
        with self.assertRaises(urllib2.URLError):
 
            # Should fail because URL will contain the parts after ; too
 
            urlerror_fail_repo = GitRepository(get_new_dir('injection-repo'), src_url=repo_inject_path, update_after_clone=True, create=True)
 

	
 
        with self.assertRaises(RepositoryError):
 
            # Should fail on direct clone call, which as of this writing does not happen outside of class
 
            clone_fail_repo = GitRepository(get_new_dir('injection-repo'), create=True)
 
            clone_fail_repo.clone(repo_inject_path, update_after_clone=True,)
 

	
 
        # Verify correct quoting of evil characters that should work on posix file systems
 
        tricky_path = get_new_dir("tricky-path-repo-$'\"`")
 
        successfully_cloned = GitRepository(tricky_path, src_url=TEST_GIT_REPO, update_after_clone=True, create=True)
 
        # Repo should have been created
 
        self.assertFalse(successfully_cloned._repo.bare)
 

	
 
        tricky_path_2 = get_new_dir("tricky-path-2-repo-$'\"`")
 
        successfully_cloned2 = GitRepository(tricky_path_2, src_url=tricky_path, bare=True, create=True)
 
        # Repo should have been created and thus used correct quoting for clone
 
        self.assertTrue(successfully_cloned2._repo.bare)
 

	
 
        # Should pass because URL has been properly quoted
 
        successfully_cloned.pull(tricky_path_2)
 
        successfully_cloned2.fetch(tricky_path)
 

	
 
    def test_repo_clone(self):
 
        self.__check_for_existing_repo()
 
        repo = GitRepository(TEST_GIT_REPO)
 
        repo_clone = GitRepository(TEST_GIT_REPO_CLONE,
 
            src_url=TEST_GIT_REPO, create=True, update_after_clone=True)
 
        self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
 
        # Checking hashes of changesets should be enough
 
        for changeset in repo.get_changesets():
 
            raw_id = changeset.raw_id
 
            self.assertEqual(raw_id, repo_clone.get_changeset(raw_id).raw_id)
 

	
 
    def test_repo_clone_without_create(self):
 
        self.assertRaises(RepositoryError, GitRepository,
 
            TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
 

	
 
    def test_repo_clone_with_update(self):
 
        repo = GitRepository(TEST_GIT_REPO)
 
        clone_path = TEST_GIT_REPO_CLONE + '_with_update'
 
        repo_clone = GitRepository(clone_path,
 
            create=True, src_url=TEST_GIT_REPO, update_after_clone=True)
 
        self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
 

	
 
        #check if current workdir was updated
 
        fpath = os.path.join(clone_path, 'MANIFEST.in')
 
        self.assertEqual(True, os.path.isfile(fpath),
 
            'Repo was cloned and updated but file %s could not be found'
 
            % fpath)
 

	
 
    def test_repo_clone_without_update(self):
 
        repo = GitRepository(TEST_GIT_REPO)
 
        clone_path = TEST_GIT_REPO_CLONE + '_without_update'
 
        repo_clone = GitRepository(clone_path,
 
            create=True, src_url=TEST_GIT_REPO, update_after_clone=False)
 
        self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
 
        #check if current workdir was *NOT* updated
 
        fpath = os.path.join(clone_path, 'MANIFEST.in')
 
        # Make sure it's not a bare repo
 
        self.assertFalse(repo_clone._repo.bare)
 
        self.assertEqual(False, os.path.isfile(fpath),
 
            'Repo was cloned and updated but file %s was found'
 
            % fpath)
 

	
 
    def test_repo_clone_into_bare_repo(self):
 
        repo = GitRepository(TEST_GIT_REPO)
 
        clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
 
        repo_clone = GitRepository(clone_path, create=True,
 
            src_url=repo.path, bare=True)
 
        self.assertTrue(repo_clone._repo.bare)
 

	
 
    def test_create_repo_is_not_bare_by_default(self):
 
        repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
 
        self.assertFalse(repo._repo.bare)
 

	
 
    def test_create_bare_repo(self):
 
        repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
 
        self.assertTrue(repo._repo.bare)
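    # Usage sketch (illustration only, not part of this changeset; the clone
    # path is hypothetical): the constructor options exercised above combine
    # as in test_repo_clone_with_update:
    #
    #     repo_clone = GitRepository('/tmp/example-clone', create=True,
    #                                src_url=TEST_GIT_REPO, update_after_clone=True)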
 

	
 
    def test_revisions(self):
 
        # there are 112 revisions (by now)
 
        # so we can assume they would be available from now on
 
        subset = set([
 
            'c1214f7e79e02fc37156ff215cd71275450cffc3',
 
            '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
 
            'fa6600f6848800641328adbf7811fd2372c02ab2',
 
            '102607b09cdd60e2793929c4f90478be29f85a17',
 
            '49d3fd156b6f7db46313fac355dca1a0b94a0017',
 
            '2d1028c054665b962fa3d307adfc923ddd528038',
 
            'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
 
            'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
 
            'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
 
            '8430a588b43b5d6da365400117c89400326e7992',
 
            'd955cd312c17b02143c04fa1099a352b04368118',
 
            'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
 
            'add63e382e4aabc9e1afdc4bdc24506c269b7618',
 
            'f298fe1189f1b69779a4423f40b48edf92a703fc',
 
            'bd9b619eb41994cac43d67cf4ccc8399c1125808',
 
            '6e125e7c890379446e98980d8ed60fba87d0f6d1',
 
            'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
 
            '0b05e4ed56c802098dfc813cbe779b2f49e92500',
 
            '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
 
            '45223f8f114c64bf4d6f853e3c35a369a6305520',
 
            'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
 
            'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
 
            '27d48942240f5b91dfda77accd2caac94708cc7d',
 
            '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
 
            'e686b958768ee96af8029fe19c6050b1a8dd3b2b'])
 
        self.assertTrue(subset.issubset(set(self.repo.revisions)))
 

	
 

	
 

	
 
    def test_slicing(self):
 
        #4 1 5 10 95
 
        for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
 
                                 (10, 20, 10), (5, 100, 95)]:
 
            revs = list(self.repo[sfrom:sto])
 
            self.assertEqual(len(revs), size)
 
            self.assertEqual(revs[0], self.repo.get_changeset(sfrom))
 
            self.assertEqual(revs[-1], self.repo.get_changeset(sto - 1))
 

	
 

	
 
    def test_branches(self):
 
        # TODO: Need more tests here
 
        # Removed (those are 'remotes' branches for cloned repo)
 
        #self.assertTrue('master' in self.repo.branches)
 
        #self.assertTrue('gittree' in self.repo.branches)
 
        #self.assertTrue('web-branch' in self.repo.branches)
 
        for name, id in self.repo.branches.items():
 
            self.assertTrue(isinstance(
 
                self.repo.get_changeset(id), GitChangeset))
 

	
 
    def test_tags(self):
 
        # TODO: Need more tests here
 
        self.assertTrue('v0.1.1' in self.repo.tags)
 
        self.assertTrue('v0.1.2' in self.repo.tags)
 
        for name, id in self.repo.tags.items():
 
            self.assertTrue(isinstance(
 
                self.repo.get_changeset(id), GitChangeset))
 

	
 
    def _test_single_changeset_cache(self, revision):
 
        chset = self.repo.get_changeset(revision)
 
        self.assertTrue(revision in self.repo.changesets)
 
        self.assertTrue(chset is self.repo.changesets[revision])
 

	
 
    def test_initial_changeset(self):
 
        id = self.repo.revisions[0]
 
        init_chset = self.repo.get_changeset(id)
 
        self.assertEqual(init_chset.message, 'initial import\n')
 
        self.assertEqual(init_chset.author,
 
            'Marcin Kuzminski <marcin@python-blog.com>')
 
        for path in ('vcs/__init__.py',
 
                     'vcs/backends/BaseRepository.py',
 
                     'vcs/backends/__init__.py'):
 
            self.assertTrue(isinstance(init_chset.get_node(path), FileNode))
 
        for path in ('', 'vcs', 'vcs/backends'):
 
            self.assertTrue(isinstance(init_chset.get_node(path), DirNode))
 

	
 
        self.assertRaises(NodeDoesNotExistError, init_chset.get_node, path='foobar')
 

	
 
        node = init_chset.get_node('vcs/')
 
        self.assertTrue(hasattr(node, 'kind'))
 
        self.assertEqual(node.kind, NodeKind.DIR)
 

	
 
        node = init_chset.get_node('vcs')
 
        self.assertTrue(hasattr(node, 'kind'))
 
        self.assertEqual(node.kind, NodeKind.DIR)
 

	
 
        node = init_chset.get_node('vcs/__init__.py')
 
        self.assertTrue(hasattr(node, 'kind'))
 
        self.assertEqual(node.kind, NodeKind.FILE)
 

	
 
    def test_not_existing_changeset(self):
 
        self.assertRaises(RepositoryError, self.repo.get_changeset,
 
            'f' * 40)
 

	
 
    def test_changeset10(self):
 

	
 
        chset10 = self.repo.get_changeset(self.repo.revisions[9])
 
        README = """===
 
VCS
 
===
 

	
 
Various Version Control System management abstraction layer for Python.
 

	
 
Introduction
 
------------
 

	
 
TODO: To be written...
 

	
 
"""
 
        node = chset10.get_node('README.rst')
 
        self.assertEqual(node.kind, NodeKind.FILE)
 
        self.assertEqual(node.content, README)
 

	
 

	
 
class GitChangesetTest(unittest.TestCase):
 

	
 
    def setUp(self):
 
        self.repo = GitRepository(TEST_GIT_REPO)
 

	
 
    def test_default_changeset(self):
 
        tip = self.repo.get_changeset()
 
        self.assertEqual(tip, self.repo.get_changeset(None))
 
        self.assertEqual(tip, self.repo.get_changeset('tip'))
 

	
 
    def test_root_node(self):
 
        tip = self.repo.get_changeset()
 
        self.assertTrue(tip.root is tip.get_node(''))
 

	
 
    def test_lazy_fetch(self):
 
        """
 
        Test if the changeset's nodes expand and are cached as we walk through
 
        the revision. This test is somewhat hard to write, as the order of
 
        operations is key here. Written by running command after command in a shell.
 
        """
 
        hex = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
 
        self.assertTrue(hex in self.repo.revisions)
 
        chset = self.repo.get_changeset(hex)
 
        self.assertTrue(len(chset.nodes) == 0)
 
        root = chset.root
 
        self.assertTrue(len(chset.nodes) == 1)
 
        self.assertTrue(len(root.nodes) == 8)
 
        # accessing root.nodes updates chset.nodes
 
        self.assertTrue(len(chset.nodes) == 9)
 

	
 
        docs = root.get_node('docs')
 
        # we haven't yet accessed anything new as docs dir was already cached
 
        self.assertTrue(len(chset.nodes) == 9)
 
        self.assertTrue(len(docs.nodes) == 8)
 
        # accessing docs.nodes updates chset.nodes
 
        self.assertTrue(len(chset.nodes) == 17)
 

	
 
        self.assertTrue(docs is chset.get_node('docs'))
 
        self.assertTrue(docs is root.nodes[0])
 
        self.assertTrue(docs is root.dirs[0])
 
        self.assertTrue(docs is chset.get_node('docs'))
 

	
 
    def test_nodes_with_changeset(self):
 
        hex = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
 
        chset = self.repo.get_changeset(hex)
 
        root = chset.root
 
        docs = root.get_node('docs')
 
        self.assertTrue(docs is chset.get_node('docs'))
 
        api = docs.get_node('api')
 
        self.assertTrue(api is chset.get_node('docs/api'))
 
        index = api.get_node('index.rst')
 
        self.assertTrue(index is chset.get_node('docs/api/index.rst'))
 
        self.assertTrue(index is chset.get_node('docs')\
 
            .get_node('api')\
 
            .get_node('index.rst'))
 

	
 
    def test_branch_and_tags(self):
 
        """
 
        rev0 = self.repo.revisions[0]
 
        chset0 = self.repo.get_changeset(rev0)
 
        self.assertEqual(chset0.branch, 'master')
 
        self.assertEqual(chset0.tags, [])
 

	
 
        rev10 = self.repo.revisions[10]
 
        chset10 = self.repo.get_changeset(rev10)
 
        self.assertEqual(chset10.branch, 'master')
 
        self.assertEqual(chset10.tags, [])
 

	
 
        rev44 = self.repo.revisions[44]
 
        chset44 = self.repo.get_changeset(rev44)
 
        self.assertEqual(chset44.branch, 'web-branch')
 

	
 
        tip = self.repo.get_changeset('tip')
 
        self.assertTrue('tip' in tip.tags)
 
        """
 
        # These tests would fail - branches are now going
 
        # to be changed in the main API in order to support the git backend
 
        pass
 

	
 
    def _test_slices(self, limit, offset):
 
        count = self.repo.count()
 
        changesets = self.repo.get_changesets(limit=limit, offset=offset)
 
        idx = 0
 
        for changeset in changesets:
 
            rev = offset + idx
 
            idx += 1
 
            rev_id = self.repo.revisions[rev]
 
            if idx > limit:
 
                self.fail("Exceeded limit already (getting revision %s, "
 
                    "there are %s total revisions, offset=%s, limit=%s)"
 
                    % (rev_id, count, offset, limit))
 
            self.assertEqual(changeset, self.repo.get_changeset(rev_id))
 
        result = list(self.repo.get_changesets(limit=limit, offset=offset))
 
        start = offset
 
        end = limit and offset + limit or None
 
        sliced = list(self.repo[start:end])
 
        self.assertEqual(result, sliced,
 
            msg="Comparison failed for limit=%s, offset=%s "
 
            "(get_changeset returned: %s and sliced: %s)"
 
            % (limit, offset, result, sliced))
 

	
 
    def _test_file_size(self, revision, path, size):
 
        node = self.repo.get_changeset(revision).get_node(path)
 
        self.assertTrue(node.is_file())
 
        self.assertEqual(node.size, size)
 

	
 
    def test_file_size(self):
 
        to_check = (
 
            ('c1214f7e79e02fc37156ff215cd71275450cffc3',
 
                'vcs/backends/BaseRepository.py', 502),
 
            ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
 
                'vcs/backends/hg.py', 854),
 
            ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
 
                'setup.py', 1068),
 

	
 
            ('d955cd312c17b02143c04fa1099a352b04368118',
 
                'vcs/backends/base.py', 2921),
 
            ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
 
                'vcs/backends/base.py', 3936),
 
            ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
 
                'vcs/backends/base.py', 6189),
 
        )
 
        for revision, path, size in to_check:
 
            self._test_file_size(revision, path, size)
 

	
 
    def test_file_history(self):
 
        # we can only check if those revisions are present in the history
 
        # as we cannot update this test every time file is changed
 
        files = {
 
            'setup.py': [
 
                '54386793436c938cff89326944d4c2702340037d',
 
                '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
 
                '998ed409c795fec2012b1c0ca054d99888b22090',
 
                '5e0eb4c47f56564395f76333f319d26c79e2fb09',
 
                '0115510b70c7229dbc5dc49036b32e7d91d23acd',
 
                '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
 
                '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
 
                '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
 
                'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
 
            ],
 
            'vcs/nodes.py': [
 
                '33fa3223355104431402a888fa77a4e9956feb3e',
 
                'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
 
                'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
 
                'ab5721ca0a081f26bf43d9051e615af2cc99952f',
 
                'c877b68d18e792a66b7f4c529ea02c8f80801542',
 
                '4313566d2e417cb382948f8d9d7c765330356054',
 
                '6c2303a793671e807d1cfc70134c9ca0767d98c2',
 
                '54386793436c938cff89326944d4c2702340037d',
 
                '54000345d2e78b03a99d561399e8e548de3f3203',
 
                '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
 
                '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
 
                '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
 
                '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
 
                'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
 
                '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
 
                '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
 
                '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
 
                '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
 
                'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
 
                'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
 
                'f15c21f97864b4f071cddfbf2750ec2e23859414',
 
                'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
 
                'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
 
                '84dec09632a4458f79f50ddbbd155506c460b4f9',
 
                '0115510b70c7229dbc5dc49036b32e7d91d23acd',
 
                '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
 
                '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
 
                'b8d04012574729d2c29886e53b1a43ef16dd00a1',
 
                '6970b057cffe4aab0a792aa634c89f4bebf01441',
 
                'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
 
                'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
 
            ],
 
            'vcs/backends/git.py': [
 
                '4cf116ad5a457530381135e2f4c453e68a1b0105',
 
                '9a751d84d8e9408e736329767387f41b36935153',
 
                'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
 
                '428f81bb652bcba8d631bce926e8834ff49bdcc6',
 
                '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
 
                '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
 
                '50e08c506174d8645a4bb517dd122ac946a0f3bf',
 
                '54000345d2e78b03a99d561399e8e548de3f3203',
 
            ],
 
        }
 
        for path, revs in files.items():
 
            node = self.repo.get_changeset(revs[0]).get_node(path)
 
            node_revs = [chset.raw_id for chset in node.history]
 
            self.assertTrue(set(revs).issubset(set(node_revs)),
 
                "We assumed that %s is a subset of the revisions in which file %s "
 
                "has been changed, but the history of that node returned: %s"
 
                % (revs, path, node_revs))
 

	
 
    def test_file_annotate(self):
 
        files = {
 
            'vcs/backends/__init__.py': {
 
                'c1214f7e79e02fc37156ff215cd71275450cffc3': {
 
                    'lines_no': 1,
 
                    'changesets': [
 
                        'c1214f7e79e02fc37156ff215cd71275450cffc3',
 
                    ],
 
                },
 
                '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
 
                    'lines_no': 21,
 
                    'changesets': [
 
                        '49d3fd156b6f7db46313fac355dca1a0b94a0017',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                    ],
 
                },
 
                'e29b67bd158580fc90fc5e9111240b90e6e86064': {
 
                    'lines_no': 32,
 
                    'changesets': [
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '54000345d2e78b03a99d561399e8e548de3f3203',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '78c3f0c23b7ee935ec276acb8b8212444c33c396',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '78c3f0c23b7ee935ec276acb8b8212444c33c396',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                    ],
 
                },
 
            },
 
        }
 

	
 
        for fname, revision_dict in files.items():
 
            for rev, data in revision_dict.items():
 
                cs = self.repo.get_changeset(rev)
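 
                # get_file_annotate yields entries where x[1] is the changeset
 
                # raw id and x[2] is a callable that lazily loads that changeset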
 

	
 
                l1_1 = [x[1] for x in cs.get_file_annotate(fname)]
 
                l1_2 = [x[2]().raw_id for x in cs.get_file_annotate(fname)]
 
                self.assertEqual(l1_1, l1_2)
 
                l1 = l1_1
 
                l2 = files[fname][rev]['changesets']
 
                self.assertEqual(l1, l2, "The lists of revisions for %s@rev %s "
 
                                "from the annotation list should match each other, "
 
                                "got \n%s \nvs \n%s " % (fname, rev, l1, l2))
 

	
 
    def test_files_state(self):
 
        """
 
        Tests state of FileNodes.
 
        """
 
        node = self.repo\
 
            .get_changeset('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
 
            .get_node('vcs/utils/diffs.py')
 
        self.assertEqual(node.state, NodeState.ADDED)
 
        self.assertTrue(node.added)
 
        self.assertFalse(node.changed)
 
        self.assertFalse(node.not_changed)
 
        self.assertFalse(node.removed)
 

	
 
        node = self.repo\
 
            .get_changeset('33fa3223355104431402a888fa77a4e9956feb3e')\
 
            .get_node('.hgignore')
 
        self.assertEqual(node.state, NodeState.CHANGED)
 
        self.assertFalse(node.added)
 
        self.assertTrue(node.changed)
 
        self.assertFalse(node.not_changed)
 
        self.assertFalse(node.removed)
 

	
 
        node = self.repo\
 
            .get_changeset('e29b67bd158580fc90fc5e9111240b90e6e86064')\
 
            .get_node('setup.py')
 
        self.assertEqual(node.state, NodeState.NOT_CHANGED)
 
        self.assertFalse(node.added)
 
        self.assertFalse(node.changed)
 
        self.assertTrue(node.not_changed)
 
        self.assertFalse(node.removed)
 

	
 
        # If a node has the REMOVED state, trying to fetch it raises
 
        # NodeDoesNotExistError
 
        chset = self.repo.get_changeset(
 
            'fa6600f6848800641328adbf7811fd2372c02ab2')
 
        path = 'vcs/backends/BaseRepository.py'
 
        self.assertRaises(NodeDoesNotExistError, chset.get_node, path)
 
        # but its path will be listed in the changeset's ``removed`` attribute
 
        self.assertTrue(path in [rf.path for rf in chset.removed])
 

	
 
        chset = self.repo.get_changeset(
 
            '54386793436c938cff89326944d4c2702340037d')
 
        changed = ['setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
 
            'vcs/nodes.py']
 
        self.assertEqual(set(changed), set([f.path for f in chset.changed]))
 

	
 
    def test_commit_message_is_unicode(self):
 
        for cs in self.repo:
 
            self.assertEqual(type(cs.message), unicode)
 

	
 
    def test_changeset_author_is_unicode(self):
 
        for cs in self.repo:
 
            self.assertEqual(type(cs.author), unicode)
 

	
 
    def test_repo_files_content_is_unicode(self):
 
        changeset = self.repo.get_changeset()
 
        for node in changeset.get_node('/'):
 
            if node.is_file():
 
                self.assertEqual(type(node.content), unicode)
 

	
 
    def test_wrong_path(self):
 
        # There is a 'setup.py' in the root dir, but not at this path:
 
        path = 'foo/bar/setup.py'
 
        tip = self.repo.get_changeset()
 
        self.assertRaises(VCSError, tip.get_node, path)
 

	
 
    def test_author_email(self):
 
        self.assertEqual('marcin@python-blog.com',
 
          self.repo.get_changeset('c1214f7e79e02fc37156ff215cd71275450cffc3')\
 
          .author_email)
 
        self.assertEqual('lukasz.balcerzak@python-center.pl',
 
          self.repo.get_changeset('ff7ca51e58c505fec0dd2491de52c622bb7a806b')\
 
          .author_email)
 
        self.assertEqual('none@none',
 
          self.repo.get_changeset('8430a588b43b5d6da365400117c89400326e7992')\
 
          .author_email)
 

	
 
    def test_author_username(self):
 
        self.assertEqual('Marcin Kuzminski',
 
          self.repo.get_changeset('c1214f7e79e02fc37156ff215cd71275450cffc3')\
 
          .author_name)
 
        self.assertEqual('Lukasz Balcerzak',
 
          self.repo.get_changeset('ff7ca51e58c505fec0dd2491de52c622bb7a806b')\
 
          .author_name)
 
        self.assertEqual('marcink',
 
          self.repo.get_changeset('8430a588b43b5d6da365400117c89400326e7992')\
 
          .author_name)
 

	
 

	
 
class GitSpecificTest(unittest.TestCase):
 

	
 
    def test_error_is_raised_for_added_if_diff_name_status_is_wrong(self):
 
        repo = mock.MagicMock()
 
        changeset = GitChangeset(repo, 'foobar')
 
        changeset._diff_name_status = 'foobar'
 
        with self.assertRaises(VCSError):
 
            changeset.added
 

	
 
    def test_error_is_raised_for_changed_if_diff_name_status_is_wrong(self):
 
        repo = mock.MagicMock()
 
        changeset = GitChangeset(repo, 'foobar')
 
        changeset._diff_name_status = 'foobar'
 
        with self.assertRaises(VCSError):
 
            changeset.changed
 

	
 
    def test_error_is_raised_for_removed_if_diff_name_status_is_wrong(self):
 
        repo = mock.MagicMock()
 
        changeset = GitChangeset(repo, 'foobar')
 
        changeset._diff_name_status = 'foobar'
 
        with self.assertRaises(VCSError):
 
            changeset.removed
 

	
 

	
 
class GitSpecificWithRepoTest(BackendTestMixin, unittest.TestCase):
 
class GitSpecificWithRepoTest(_BackendTestMixin, unittest.TestCase):
 
    backend_alias = 'git'
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        return [
 
            {
 
                'message': 'Initial',
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 20),
 
                'added': [
 
                    FileNode('foobar/static/js/admin/base.js', content='base'),
 
                    FileNode('foobar/static/admin', content='admin',
 
                        mode=0120000), # this is a link
 
                    FileNode('foo', content='foo'),
 
                ],
 
            },
 
            {
 
                'message': 'Second',
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 22),
 
                'added': [
 
                    FileNode('foo2', content='foo2'),
 
                ],
 
            },
 
        ]
 

	
 
    def test_paths_slow_traversing(self):
 
        cs = self.repo.get_changeset()
 
        self.assertEqual(cs.get_node('foobar').get_node('static').get_node('js')
 
            .get_node('admin').get_node('base.js').content, 'base')
 

	
 
    def test_paths_fast_traversing(self):
 
        cs = self.repo.get_changeset()
 
        self.assertEqual(cs.get_node('foobar/static/js/admin/base.js').content,
 
            'base')
 

	
 
    def test_workdir_get_branch(self):
 
        self.repo.run_git_command('checkout -b production')
 
        # Regression test: one of the following would fail if we don't check the
 
        # .git/HEAD file
 
        self.repo.run_git_command('checkout production')
 
        self.assertEqual(self.repo.workdir.get_branch(), 'production')
 
        self.repo.run_git_command('checkout master')
 
        self.assertEqual(self.repo.workdir.get_branch(), 'master')
 

	
 
    def test_get_diff_runs_git_command_with_hashes(self):
 
        self.repo.run_git_command = mock.Mock(return_value=['', ''])
 
        self.repo.get_diff(0, 1)
 
        self.repo.run_git_command.assert_called_once_with(
 
          'diff -U%s --full-index --binary -p -M --abbrev=40 %s %s' %
 
            (3, self.repo._get_revision(0), self.repo._get_revision(1)))
 

	
 
    def test_get_diff_runs_git_command_with_str_hashes(self):
 
        self.repo.run_git_command = mock.Mock(return_value=['', ''])
 
        self.repo.get_diff(self.repo.EMPTY_CHANGESET, 1)
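 
        # diffing against EMPTY_CHANGESET is expected to run 'git show' on the
 
        # target revision rather than 'git diff' between two revisions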
 
        self.repo.run_git_command.assert_called_once_with(
 
            'show -U%s --full-index --binary -p -M --abbrev=40 %s' %
 
            (3, self.repo._get_revision(1)))
 

	
 
    def test_get_diff_runs_git_command_with_path_if_its_given(self):
 
        self.repo.run_git_command = mock.Mock(return_value=['', ''])
 
        self.repo.get_diff(0, 1, 'foo')
 
        self.repo.run_git_command.assert_called_once_with(
 
          'diff -U%s --full-index --binary -p -M --abbrev=40 %s %s -- "foo"'
 
            % (3, self.repo._get_revision(0), self.repo._get_revision(1)))
 

	
 

	
 
class GitRegressionTest(BackendTestMixin, unittest.TestCase):
 
class GitRegressionTest(_BackendTestMixin, unittest.TestCase):
 
    backend_alias = 'git'
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        return [
 
            {
 
                'message': 'Initial',
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 20),
 
                'added': [
 
                    FileNode('bot/__init__.py', content='base'),
 
                    FileNode('bot/templates/404.html', content='base'),
 
                    FileNode('bot/templates/500.html', content='base'),
 
                ],
 
            },
 
            {
 
                'message': 'Second',
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 22),
 
                'added': [
 
                    FileNode('bot/build/migrations/1.py', content='foo2'),
 
                    FileNode('bot/build/migrations/2.py', content='foo2'),
 
                    FileNode('bot/build/static/templates/f.html', content='foo2'),
 
                    FileNode('bot/build/static/templates/f1.html', content='foo2'),
 
                    FileNode('bot/build/templates/err.html', content='foo2'),
 
                    FileNode('bot/build/templates/err2.html', content='foo2'),
 
                ],
 
            },
 
        ]
 

	
 
    def test_similar_paths(self):
 
        cs = self.repo.get_changeset()
 
        paths = lambda *n: [x.path for x in n]
 
        self.assertEqual(paths(*cs.get_nodes('bot')), ['bot/build', 'bot/templates', 'bot/__init__.py'])
 
        self.assertEqual(paths(*cs.get_nodes('bot/build')), ['bot/build/migrations', 'bot/build/static', 'bot/build/templates'])
 
        self.assertEqual(paths(*cs.get_nodes('bot/build/static')), ['bot/build/static/templates'])
 
        # this get_nodes call below is the one that used to cause trouble
 
        self.assertEqual(paths(*cs.get_nodes('bot/build/static/templates')), ['bot/build/static/templates/f.html', 'bot/build/static/templates/f1.html'])
 
        self.assertEqual(paths(*cs.get_nodes('bot/build/templates')), ['bot/build/templates/err.html', 'bot/build/templates/err2.html'])
 
        self.assertEqual(paths(*cs.get_nodes('bot/templates/')), ['bot/templates/404.html', 'bot/templates/500.html'])
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_repository.py
Show inline comments
 
from __future__ import with_statement
 
import datetime
 
from kallithea.tests.vcs.base import BackendTestMixin
 
from kallithea.tests.vcs.base import _BackendTestMixin
 
from kallithea.tests.vcs.conf import SCM_TESTS
 
from kallithea.tests.vcs.conf import TEST_USER_CONFIG_FILE
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.lib.vcs.utils.compat import unittest
 
from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError
 

	
 

	
 
class RepositoryBaseTest(BackendTestMixin):
 
class RepositoryBaseTest(_BackendTestMixin):
 
    recreate_repo_per_test = False
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        return super(RepositoryBaseTest, cls)._get_commits()[:1]
 

	
 
    def test_get_config_value(self):
 
        self.assertEqual(self.repo.get_config_value('universal', 'foo',
 
            TEST_USER_CONFIG_FILE), 'bar')
 

	
 
    def test_get_config_value_defaults_to_None(self):
 
        self.assertEqual(self.repo.get_config_value('universal', 'nonexist',
 
            TEST_USER_CONFIG_FILE), None)
 

	
 
    def test_get_user_name(self):
 
        self.assertEqual(self.repo.get_user_name(TEST_USER_CONFIG_FILE),
 
            'Foo Bar')
 

	
 
    def test_get_user_email(self):
 
        self.assertEqual(self.repo.get_user_email(TEST_USER_CONFIG_FILE),
 
            'foo.bar@example.com')
 

	
 
    def test_repo_equality(self):
 
        self.assertTrue(self.repo == self.repo)
 

	
 
    def test_repo_equality_broken_object(self):
 
        import copy
 
        _repo = copy.copy(self.repo)
 
        delattr(_repo, 'path')
 
        self.assertTrue(self.repo != _repo)
 

	
 
    def test_repo_equality_other_object(self):
 
        class dummy(object):
 
            path = self.repo.path
 
        self.assertTrue(self.repo != dummy())
 

	
 

	
 
class RepositoryGetDiffTest(BackendTestMixin):
 
class RepositoryGetDiffTest(_BackendTestMixin):
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        commits = [
 
            {
 
                'message': 'Initial commit',
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 20),
 
                'added': [
 
                    FileNode('foobar', content='foobar'),
 
                    FileNode('foobar2', content='foobar2'),
 
                ],
 
            },
 
            {
 
                'message': 'Changed foobar, added foobar3',
 
                'author': 'Jane Doe <jane.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 21),
 
                'added': [
 
                    FileNode('foobar3', content='foobar3'),
 
                ],
 
                'changed': [
 
                    FileNode('foobar', 'FOOBAR'),
 
                ],
 
            },
 
            {
 
                'message': 'Removed foobar, changed foobar3',
 
                'author': 'Jane Doe <jane.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 22),
 
                'changed': [
 
                    FileNode('foobar3', content='FOOBAR\nFOOBAR\nFOOBAR\n'),
 
                ],
 
                'removed': [FileNode('foobar')],
 
            },
 
        ]
 
        return commits
 

	
 
    def test_raise_for_wrong(self):
 
        with self.assertRaises(ChangesetDoesNotExistError):
 
            self.repo.get_diff('a' * 40, 'b' * 40)
 

	
 

	
 
class GitRepositoryGetDiffTest(RepositoryGetDiffTest, unittest.TestCase):
 
    backend_alias = 'git'
 

	
 
    def test_initial_commit_diff(self):
 
        initial_rev = self.repo.revisions[0]
 
        self.assertEqual(self.repo.get_diff(self.repo.EMPTY_CHANGESET, initial_rev), '''diff --git a/foobar b/foobar
 
new file mode 100644
 
index 0000000000000000000000000000000000000000..f6ea0495187600e7b2288c8ac19c5886383a4632
 
--- /dev/null
 
+++ b/foobar
 
@@ -0,0 +1 @@
 
+foobar
 
\ No newline at end of file
 
diff --git a/foobar2 b/foobar2
 
new file mode 100644
 
index 0000000000000000000000000000000000000000..e8c9d6b98e3dce993a464935e1a53f50b56a3783
 
--- /dev/null
 
+++ b/foobar2
 
@@ -0,0 +1 @@
 
+foobar2
 
\ No newline at end of file
 
''')
 

	
 
    def test_second_changeset_diff(self):
 
        revs = self.repo.revisions
 
        self.assertEqual(self.repo.get_diff(revs[0], revs[1]), '''diff --git a/foobar b/foobar
 
index f6ea0495187600e7b2288c8ac19c5886383a4632..389865bb681b358c9b102d79abd8d5f941e96551 100644
 
--- a/foobar
 
+++ b/foobar
 
@@ -1 +1 @@
 
-foobar
 
\ No newline at end of file
 
+FOOBAR
 
\ No newline at end of file
 
diff --git a/foobar3 b/foobar3
 
new file mode 100644
 
index 0000000000000000000000000000000000000000..c11c37d41d33fb47741cff93fa5f9d798c1535b0
 
--- /dev/null
 
+++ b/foobar3
 
@@ -0,0 +1 @@
 
+foobar3
 
\ No newline at end of file
 
''')
 

	
 
    def test_third_changeset_diff(self):
 
        revs = self.repo.revisions
 
        self.assertEqual(self.repo.get_diff(revs[1], revs[2]), '''diff --git a/foobar b/foobar
 
deleted file mode 100644
 
index 389865bb681b358c9b102d79abd8d5f941e96551..0000000000000000000000000000000000000000
 
--- a/foobar
 
+++ /dev/null
 
@@ -1 +0,0 @@
 
-FOOBAR
 
\ No newline at end of file
 
diff --git a/foobar3 b/foobar3
 
index c11c37d41d33fb47741cff93fa5f9d798c1535b0..f9324477362684ff692aaf5b9a81e01b9e9a671c 100644
 
--- a/foobar3
 
+++ b/foobar3
 
@@ -1 +1,3 @@
 
-foobar3
 
\ No newline at end of file
 
+FOOBAR
 
+FOOBAR
 
+FOOBAR
 
''')
 

	
 

	
 
class HgRepositoryGetDiffTest(RepositoryGetDiffTest, unittest.TestCase):
 
    backend_alias = 'hg'
 

	
 
    def test_initial_commit_diff(self):
 
        initial_rev = self.repo.revisions[0]
 
        self.assertEqual(self.repo.get_diff(self.repo.EMPTY_CHANGESET, initial_rev), '''diff --git a/foobar b/foobar
 
new file mode 100644
 
--- /dev/null
 
+++ b/foobar
 
@@ -0,0 +1,1 @@
 
+foobar
 
\ No newline at end of file
 
diff --git a/foobar2 b/foobar2
 
new file mode 100644
 
--- /dev/null
 
+++ b/foobar2
 
@@ -0,0 +1,1 @@
 
+foobar2
 
\ No newline at end of file
 
''')
 

	
 
    def test_second_changeset_diff(self):
 
        revs = self.repo.revisions
 
        self.assertEqual(self.repo.get_diff(revs[0], revs[1]), '''diff --git a/foobar b/foobar
 
--- a/foobar
 
+++ b/foobar
 
@@ -1,1 +1,1 @@
 
-foobar
 
\ No newline at end of file
 
+FOOBAR
 
\ No newline at end of file
 
diff --git a/foobar3 b/foobar3
 
new file mode 100644
 
--- /dev/null
 
+++ b/foobar3
 
@@ -0,0 +1,1 @@
 
+foobar3
 
\ No newline at end of file
 
''')
 

	
 
    def test_third_changeset_diff(self):
 
        revs = self.repo.revisions
 
        self.assertEqual(self.repo.get_diff(revs[1], revs[2]), '''diff --git a/foobar b/foobar
 
deleted file mode 100644
 
--- a/foobar
 
+++ /dev/null
 
@@ -1,1 +0,0 @@
 
-FOOBAR
 
\ No newline at end of file
 
diff --git a/foobar3 b/foobar3
 
--- a/foobar3
 
+++ b/foobar3
 
@@ -1,1 +1,3 @@
 
-foobar3
 
\ No newline at end of file
 
+FOOBAR
 
+FOOBAR
 
+FOOBAR
 
''')
 

	
 

	
 
# For each backend, create a test case class
 
for alias in SCM_TESTS:
 
    attrs = {
 
        'backend_alias': alias,
 
    }
 
    cls_name = alias.capitalize() + RepositoryBaseTest.__name__
 
    bases = (RepositoryBaseTest, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_tags.py
Show inline comments
 
from __future__ import with_statement
 

	
 
from kallithea.tests.vcs.base import BackendTestMixin
 
from kallithea.tests.vcs.base import _BackendTestMixin
 
from kallithea.tests.vcs.conf import SCM_TESTS
 
from kallithea.lib.vcs.exceptions import TagAlreadyExistError
 
from kallithea.lib.vcs.exceptions import TagDoesNotExistError
 
from kallithea.lib.vcs.utils.compat import unittest
 

	
 

	
 
class TagsTestCaseMixin(BackendTestMixin):
 
class TagsTestCaseMixin(_BackendTestMixin):
 

	
 
    def test_new_tag(self):
 
        tip = self.repo.get_changeset()
 
        tagsize = len(self.repo.tags)
 
        tag = self.repo.tag('last-commit', 'joe', tip.raw_id)
 

	
 
        self.assertEqual(len(self.repo.tags), tagsize + 1)
 
        for top, dirs, files in tip.walk():
 
            self.assertEqual(top, tag.get_node(top.path))
 

	
 
    def test_tag_already_exist(self):
 
        tip = self.repo.get_changeset()
 
        self.repo.tag('last-commit', 'joe', tip.raw_id)
 

	
 
        self.assertRaises(TagAlreadyExistError,
 
            self.repo.tag, 'last-commit', 'joe', tip.raw_id)
 

	
 
        chset = self.repo.get_changeset(0)
 
        self.assertRaises(TagAlreadyExistError,
 
            self.repo.tag, 'last-commit', 'jane', chset.raw_id)
 

	
 
    def test_remove_tag(self):
 
        tip = self.repo.get_changeset()
 
        self.repo.tag('last-commit', 'joe', tip.raw_id)
 
        tagsize = len(self.repo.tags)
 

	
 
        self.repo.remove_tag('last-commit', user='evil joe')
 
        self.assertEqual(len(self.repo.tags), tagsize - 1)
 

	
 
    def test_remove_tag_which_does_not_exist(self):
 
        self.assertRaises(TagDoesNotExistError,
 
            self.repo.remove_tag, 'last-commit', user='evil joe')
 

	
 
    def test_name_with_slash(self):
 
        self.repo.tag('19/10/11', 'joe')
 
        self.assertTrue('19/10/11' in self.repo.tags)
 
        self.repo.tag('11', 'joe')
 
        self.assertTrue('11' in self.repo.tags)
 

	
 
# For each backend, create a test case class
 
for alias in SCM_TESTS:
 
    attrs = {
 
        'backend_alias': alias,
 
    }
 
    cls_name = ''.join(('%s tags test' % alias).title().split())
 
    bases = (TagsTestCaseMixin, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
 

	
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_workdirs.py
Show inline comments
 
from __future__ import with_statement
 

	
 
import datetime
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.lib.vcs.utils.compat import unittest
 
from kallithea.tests.vcs.base import BackendTestMixin
 
from kallithea.tests.vcs.base import _BackendTestMixin
 
from kallithea.tests.vcs.conf import SCM_TESTS
 

	
 

	
 
class WorkdirTestCaseMixin(BackendTestMixin):
 
class WorkdirTestCaseMixin(_BackendTestMixin):
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        commits = [
 
            {
 
                'message': u'Initial commit',
 
                'author': u'Joe Doe <joe.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 20),
 
                'added': [
 
                    FileNode('foobar', content='Foobar'),
 
                    FileNode('foobar2', content='Foobar II'),
 
                    FileNode('foo/bar/baz', content='baz here!'),
 
                ],
 
            },
 
            {
 
                'message': u'Changes...',
 
                'author': u'Jane Doe <jane.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 21),
 
                'added': [
 
                    FileNode('some/new.txt', content='news...'),
 
                ],
 
                'changed': [
 
                    FileNode('foobar', 'Foobar I'),
 
                ],
 
                'removed': [],
 
            },
 
        ]
 
        return commits
 

	
 
    def test_get_branch_for_default_branch(self):
 
        self.assertEqual(self.repo.workdir.get_branch(),
 
            self.repo.DEFAULT_BRANCH_NAME)
 

	
 
    def test_get_branch_after_adding_one(self):
 
        self.imc.add(FileNode('docs/index.txt',
 
            content='Documentation\n'))
 
        self.imc.commit(
 
            message=u'New branch: foobar',
 
            author=u'joe',
 
            branch='foobar',
 
        )
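 
        # committing on a new branch should not switch the checked-out branch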
 
        self.assertEqual(self.repo.workdir.get_branch(), self.default_branch)
 

	
 
    def test_get_changeset(self):
 
        old_head = self.repo.get_changeset()
 
        self.imc.add(FileNode('docs/index.txt',
 
            content='Documentation\n'))
 
        head = self.imc.commit(
 
            message=u'New branch: foobar',
 
            author=u'joe',
 
            branch='foobar',
 
        )
 
        self.assertEqual(self.repo.workdir.get_branch(), self.default_branch)
 
        self.repo.workdir.checkout_branch('foobar')
 
        self.assertEqual(self.repo.workdir.get_changeset(), head)
 

	
 
        # Make sure that the old head is still there after updating to the default branch
 
        self.repo.workdir.checkout_branch(self.default_branch)
 
        self.assertEqual(self.repo.workdir.get_changeset(), old_head)
 

	
 
    def test_checkout_branch(self):
 
        from kallithea.lib.vcs.exceptions import BranchDoesNotExistError
 
        # first, 'foobranch' does not exist.
 
        self.assertRaises(BranchDoesNotExistError, self.repo.workdir.checkout_branch,
 
                          branch='foobranch')
 
        # create new branch 'foobranch'.
 
        self.imc.add(FileNode('file1', content='blah'))
 
        self.imc.commit(message=u'asd', author=u'john', branch='foobranch')
 
        # go back to the default branch
 
        self.repo.workdir.checkout_branch()
 
        self.assertEqual(self.repo.workdir.get_branch(), self.backend_class.DEFAULT_BRANCH_NAME)
 
        # checkout 'foobranch'
 
        self.repo.workdir.checkout_branch('foobranch')
 
        self.assertEqual(self.repo.workdir.get_branch(), 'foobranch')
 

	
 

	
 
# For each backend, create a test case class
 
for alias in SCM_TESTS:
 
    attrs = {
 
        'backend_alias': alias,
 
    }
 
    cls_name = ''.join(('%s branch test' % alias).title().split())
 
    bases = (WorkdirTestCaseMixin, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
 

	
 

	
 
if __name__ == '__main__':
 
    unittest.main()