Changeset - d7488551578e
[Not reviewed]
beta
Marcin Kuzminski <marcin@python-works.com> - 2013-04-27 11:24:25
synced vcs with upstream

- moved subprocessio module to VCS
- many small changes to make the embedded vcs as similar as possible to the external lib
- use only absolute imports
- patch vcs config during Pylons environment load (see the sketch below)
34 files changed:
0 comments (0 inline, 0 general)
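
The "absolute imports" and "patch vcs config" points above are illustrated by the short sketch below. It is not part of the changeset; it only reuses names that appear in the diff that follows (rhodecode.CONFIG, set_vcs_config, load_environment):

# sketch: the embedded vcs library is addressed with absolute imports rooted
# at the rhodecode package, never with relative imports
import rhodecode
from rhodecode.lib.utils import set_vcs_config

# load_environment() in rhodecode/config/environment.py now finishes by
# pushing the assembled Pylons config into the embedded vcs settings
set_vcs_config(rhodecode.CONFIG)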
rhodecode/__init__.py
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.__init__
 
    ~~~~~~~~~~~~~~~~~~
 

	
 
    RhodeCode, a web-based repository management application based on Pylons
 
    versioning implementation: http://www.python.org/dev/peps/pep-0386/
 

	
 
    :created_on: Apr 9, 2010
 
    :author: marcink
 
    :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
import sys
 
import platform
 

	
 
VERSION = (1, 7, 0, 'dev')
 
BACKENDS = {
 
    'hg': 'Mercurial repository',
 
    'git': 'Git repository',
 
}
 

	
 
CELERY_ON = False
 
CELERY_EAGER = False
 

	
 
# link to config for pylons
 
CONFIG = {}
 

	
 
# Linked module for extensions
 
EXTENSIONS = {}
 

	
 
try:
 
    from rhodecode.lib import get_current_revision
 
    _rev = get_current_revision()
 
    if _rev and len(VERSION) > 3:
 
        VERSION += ('%s' % _rev[0],)
 
except ImportError:
 
    pass
 

	
 
__version__ = ('.'.join((str(each) for each in VERSION[:3])) +
 
               '.'.join(VERSION[3:]))
 
__dbversion__ = 12  # defines current db version for migrations
 
__platform__ = platform.system()
 
__license__ = 'GPLv3'
 
__py_version__ = sys.version_info
 
__author__ = 'Marcin Kuzminski'
 
__url__ = 'http://rhodecode.org'
 

	
 
PLATFORM_WIN = ('Windows')
 
PLATFORM_OTHERS = ('Linux', 'Darwin', 'FreeBSD', 'OpenBSD', 'SunOS') #deprecated
 

	
 
is_windows = __platform__ in PLATFORM_WIN
 
is_windows = __platform__ in ('Windows')
 
is_unix = not is_windows
 

	
 

	
 
BACKENDS = {
 
    'hg': 'Mercurial repository',
 
    'git': 'Git repository',
 
}
 

	
 
CELERY_ON = False
 
CELERY_EAGER = False
 

	
 
# link to config for pylons
 
CONFIG = {}
 

	
 
# Linked module for extensions
 
EXTENSIONS = {}
rhodecode/config/environment.py
 
"""Pylons environment configuration"""
 

	
 
import os
 
import logging
 
import rhodecode
 

	
 
from mako.lookup import TemplateLookup
 
from pylons.configuration import PylonsConfig
 
from pylons.error import handle_mako_error
 

	
 
# don't remove this import it does magic for celery
 
from rhodecode.lib import celerypylons
 

	
 
import rhodecode.lib.app_globals as app_globals
 

	
 
from rhodecode.config.routing import make_map
 

	
 
from rhodecode.lib import helpers
 
from rhodecode.lib.auth import set_available_permissions
 
from rhodecode.lib.utils import repo2db_mapper, make_ui, set_rhodecode_config,\
 
    load_rcextensions, check_git_version
 
    load_rcextensions, check_git_version, set_vcs_config
 
from rhodecode.lib.utils2 import engine_from_config, str2bool
 
from rhodecode.lib.db_manage import DbManage
 
from rhodecode.model import init_model
 
from rhodecode.model.scm import ScmModel
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def load_environment(global_conf, app_conf, initial=False):
 
    """
 
    Configure the Pylons environment via the ``pylons.config``
 
    object
 
    """
 
    config = PylonsConfig()
 

	
 
    # Pylons paths
 
    root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
 
    paths = dict(
 
        root=root,
 
        controllers=os.path.join(root, 'controllers'),
 
        static_files=os.path.join(root, 'public'),
 
        templates=[os.path.join(root, 'templates')]
 
    )
 

	
 
    # Initialize config with the basic options
 
    config.init_app(global_conf, app_conf, package='rhodecode', paths=paths)
 

	
 
    # store some globals into rhodecode
 
    rhodecode.CELERY_ON = str2bool(config['app_conf'].get('use_celery'))
 
    rhodecode.CELERY_EAGER = str2bool(config['app_conf'].get('celery.always.eager'))
 

	
 
    config['routes.map'] = make_map(config)
 
    config['pylons.app_globals'] = app_globals.Globals(config)
 
    config['pylons.h'] = helpers
 
    rhodecode.CONFIG = config
 

	
 
    load_rcextensions(root_path=config['here'])
 

	
 
    # Setup cache object as early as possible
 
    import pylons
 
    pylons.cache._push_object(config['pylons.app_globals'].cache)
 

	
 
    # Create the Mako TemplateLookup, with the default auto-escaping
 
    config['pylons.app_globals'].mako_lookup = TemplateLookup(
 
        directories=paths['templates'],
 
        error_handler=handle_mako_error,
 
        module_directory=os.path.join(app_conf['cache_dir'], 'templates'),
 
        input_encoding='utf-8', default_filters=['escape'],
 
        imports=['from webhelpers.html import escape'])
 

	
 
    # raise an error when a non-existing attribute of c is accessed in templates
 
    config['pylons.strict_tmpl_context'] = True
 
    test = os.path.split(config['__file__'])[-1] == 'test.ini'
 
    if test:
 
        if os.environ.get('TEST_DB'):
 
            # swap config if we pass an environment variable
 
            config['sqlalchemy.db1.url'] = os.environ.get('TEST_DB')
 

	
 
        from rhodecode.lib.utils import create_test_env, create_test_index
 
        from rhodecode.tests import  TESTS_TMP_PATH
 
        # set RC_NO_TMP_PATH=1 to disable re-creating the database and
 
        # test repos
 
        if not int(os.environ.get('RC_NO_TMP_PATH', 0)):
 
            create_test_env(TESTS_TMP_PATH, config)
 
        # set RC_WHOOSH_TEST_DISABLE=1 to disable whoosh index during tests
 
        if not int(os.environ.get('RC_WHOOSH_TEST_DISABLE', 0)):
 
            create_test_index(TESTS_TMP_PATH, config, True)
 

	
 
    #check git version
 
    check_git_version()
 
    DbManage.check_waitress()
 
    # MULTIPLE DB configs
 
    # Setup the SQLAlchemy database engine
 
    sa_engine_db1 = engine_from_config(config, 'sqlalchemy.db1.')
 
    init_model(sa_engine_db1)
 

	
 
    repos_path = make_ui('db').configitems('paths')[0][1]
 
    if str2bool(config.get('initial_repo_scan', True)):
 
        repo2db_mapper(ScmModel().repo_scan(repos_path),
 
                       remove_obsolete=False, install_git_hook=False)
 
    set_available_permissions(config)
 
    config['base_path'] = repos_path
 
    set_rhodecode_config(config)
 

	
 
    instance_id = rhodecode.CONFIG.get('instance_id')
 
    if instance_id == '*':
 
        instance_id = '%s-%s' % (os.uname()[1], os.getpid())
 
        rhodecode.CONFIG['instance_id'] = instance_id
 

	
 
    # CONFIGURATION OPTIONS HERE (note: all config options will override
 
    # any Pylons config options)
 

	
 
    # store config reference into our module to skip import magic of
 
    # pylons
 
    rhodecode.CONFIG.update(config)
 

	
 
    set_vcs_config(rhodecode.CONFIG)
 
    return config
rhodecode/lib/__init__.py
 
import os
 

	
 

	
 
def get_current_revision(quiet=False):
 
    """
 
    Returns tuple of (number, id) from repository containing this package
 
    or None if repository could not be found.
 

	
 
    :param quiet: if True, suppress the warning printed when the revision cannot be read
 
    """
 

	
 
    try:
 
        from rhodecode.lib.vcs import get_repo
 
        from rhodecode.lib.vcs.utils.helpers import get_scm
 
        repopath = os.path.join(os.path.dirname(__file__), '..', '..')
 
        repopath = os.path.abspath(os.path.join(os.path.dirname(__file__),
 
                                                '..', '..'))
 
        scm = get_scm(repopath)[0]
 
        repo = get_repo(path=repopath, alias=scm)
 
        wk_dir = repo.workdir
 
        cur_rev = wk_dir.get_changeset()
 
        return (cur_rev.revision, cur_rev.short_id)
 
    except Exception, err:
 
        if not quiet:
 
            print ("WARNING: Cannot retrieve rhodecode's revision. "
 
                   "disregard this if you don't know what that means. "
 
                   "Original error was: %s" % err)
 
        return None
rhodecode/lib/compat.py
 
# -*- coding: utf-8 -*-
 
"""
 
    rhodecode.lib.compat
 
    ~~~~~~~~~~~~~~~~~~~~
 

	
 
    Python backward compatibility functions and common libs
 

	
 

	
 
    :created_on: Oct 7, 2011
 
    :author: marcink
 
    :copyright: (C) 2010-2010 Marcin Kuzminski <marcin@python-works.com>
 
    :license: GPLv3, see COPYING for more details.
 
"""
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
import os
 
from rhodecode import __platform__, PLATFORM_WIN, __py_version__
 
from rhodecode import __py_version__, is_windows
 

	
 
#==============================================================================
 
# json
 
#==============================================================================
 
from rhodecode.lib.ext_json import json
 
import array
 

	
 

	
 
#==============================================================================
 
# izip_longest
 
#==============================================================================
 
try:
 
    from itertools import izip_longest
 
except ImportError:
 
    import itertools
 

	
 
    def izip_longest(*args, **kwds):
 
        fillvalue = kwds.get("fillvalue")
 

	
 
        def sentinel(counter=([fillvalue] * (len(args) - 1)).pop):
 
            yield counter()  # yields the fillvalue, or raises IndexError
 

	
 
        fillers = itertools.repeat(fillvalue)
 
        iters = [itertools.chain(it, sentinel(), fillers)
 
                    for it in args]
 
        try:
 
            for tup in itertools.izip(*iters):
 
                yield tup
 
        except IndexError:
 
            pass
 

	
 

	
 
#==============================================================================
 
# OrderedDict
 
#==============================================================================
 

	
 
# Python Software Foundation License
 

	
 
# XXX: it feels like using the class with "is" and "is not" instead of "==" and
 
# "!=" should be faster.
 
class _Nil(object):
 

	
 
    def __repr__(self):
 
        return "nil"
 

	
 
    def __eq__(self, other):
 
        if (isinstance(other, _Nil)):
 
            return True
 
        else:
 
            return NotImplemented
 

	
 
    def __ne__(self, other):
 
        if (isinstance(other, _Nil)):
 
            return False
 
@@ -321,149 +320,150 @@ class _odict(object):
 
            curr_key = pred_key
 

	
 
    def ritems(self):
 
        """List of the (key, value) in reversed order.
 
        """
 
        return list(self.riteritems())
 

	
 
    def firstkey(self):
 
        if self:
 
            return self._dict_impl().__getattribute__(self, 'lh')
 
        else:
 
            raise KeyError("'firstkey(): ordered dictionary is empty'")
 

	
 
    def lastkey(self):
 
        if self:
 
            return self._dict_impl().__getattribute__(self, 'lt')
 
        else:
 
            raise KeyError("'lastkey(): ordered dictionary is empty'")
 

	
 
    def as_dict(self):
 
        return self._dict_impl()(self.items())
 

	
 
    def _repr(self):
 
        """_repr(): low level repr of the whole data contained in the odict.
 
        Useful for debugging.
 
        """
 
        dict_impl = self._dict_impl()
 
        form = "odict low level repr lh,lt,data: %r, %r, %s"
 
        return form % (dict_impl.__getattribute__(self, 'lh'),
 
                       dict_impl.__getattribute__(self, 'lt'),
 
                       dict_impl.__repr__(self))
 

	
 

	
 
class OrderedDict(_odict, dict):
 

	
 
    def _dict_impl(self):
 
        return dict
 

	
 

	
 
#==============================================================================
 
# OrderedSet
 
#==============================================================================
 
from sqlalchemy.util import OrderedSet
 

	
 

	
 
#==============================================================================
 
# kill FUNCTIONS
 
#==============================================================================
 
if __platform__ in PLATFORM_WIN:
 
if is_windows:
 
    import ctypes
 

	
 
    def kill(pid, sig):
 
        """kill function for Win32"""
 
        kernel32 = ctypes.windll.kernel32
 
        handle = kernel32.OpenProcess(1, 0, pid)
 
        return (0 != kernel32.TerminateProcess(handle, 0))
 

	
 
else:
 
    kill = os.kill
 

	
 

	
 
#==============================================================================
 
# itertools.product
 
#==============================================================================
 

	
 
try:
 
    from itertools import product
 
except ImportError:
 
    def product(*args, **kwds):
 
        # product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy
 
        # product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111
 
        pools = map(tuple, args) * kwds.get('repeat', 1)
 
        result = [[]]
 
        for pool in pools:
 
            result = [x + [y] for x in result for y in pool]
 
        for prod in result:
 
            yield tuple(prod)
 

	
 

	
 
#==============================================================================
 
# BytesIO
 
#==============================================================================
 

	
 
try:
 
    from io import BytesIO
 
except ImportError:
 
    from cStringIO import StringIO as BytesIO
 

	
 

	
 
#==============================================================================
 
# bytes
 
#==============================================================================
 
if __py_version__ >= (2, 6):
 
    _bytes = bytes
 
else:
 
    # in py2.6 bytes is a synonym for str
 
    _bytes = str
 

	
 
if __py_version__ >= (2, 6):
 
    _bytearray = bytearray
 
else:
 
    import array
 
    # no idea if this is correct but all integration tests are passing
 
    # i think we never use bytearray anyway
 
    _bytearray = array
 

	
 

	
 
#==============================================================================
 
# deque
 
#==============================================================================
 

	
 
if __py_version__ >= (2, 6):
 
    from collections import deque
 
else:
 
    #need to implement our own deque with maxlen
 
    class deque(object):
 

	
 
        def __init__(self, iterable=(), maxlen= -1):
 
            if not hasattr(self, 'data'):
 
                self.left = self.right = 0
 
                self.data = {}
 
            self.maxlen = maxlen or -1
 
            self.extend(iterable)
 

	
 
        def append(self, x):
 
            self.data[self.right] = x
 
            self.right += 1
 
            if self.maxlen != -1 and len(self) > self.maxlen:
 
                self.popleft()
 

	
 
        def appendleft(self, x):
 
            self.left -= 1
 
            self.data[self.left] = x
 
            if self.maxlen != -1 and len(self) > self.maxlen:
 
                self.pop()
 

	
 
        def pop(self):
 
            if self.left == self.right:
 
                raise IndexError('cannot pop from empty deque')
 
            self.right -= 1
 
            elem = self.data[self.right]
 
            del self.data[self.right]
 
            return elem
 

	
 
        def popleft(self):
 
            if self.left == self.right:
 
                raise IndexError('cannot pop from empty deque')
 
            elem = self.data[self.left]
 
            del self.data[self.left]
 
            self.left += 1
 
@@ -503,218 +503,48 @@ else:
 
            except KeyError:
 
                raise IndexError
 

	
 
        def __delitem__(self, i):
 
            size = len(self)
 
            if not (-size <= i < size):
 
                raise IndexError
 
            data = self.data
 
            if i < 0:
 
                i += size
 
            for j in xrange(self.left + i, self.right - 1):
 
                data[j] = data[j + 1]
 
            self.pop()
 

	
 
        def __len__(self):
 
            return self.right - self.left
 

	
 
        def __cmp__(self, other):
 
            if type(self) != type(other):
 
                return cmp(type(self), type(other))
 
            return cmp(list(self), list(other))
 

	
 
        def __repr__(self, _track=[]):
 
            if id(self) in _track:
 
                return '...'
 
            _track.append(id(self))
 
            r = 'deque(%r, maxlen=%s)' % (list(self), self.maxlen)
 
            _track.remove(id(self))
 
            return r
 

	
 
        def __getstate__(self):
 
            return (tuple(self),)
 

	
 
        def __setstate__(self, s):
 
            self.__init__(s[0])
 

	
 
        def __hash__(self):
 
            raise TypeError
 

	
 
        def __copy__(self):
 
            return self.__class__(self)
 

	
 
        def __deepcopy__(self, memo={}):
 
            from copy import deepcopy
 
            result = self.__class__()
 
            memo[id(self)] = result
 
            result.__init__(deepcopy(tuple(self), memo))
 
            return result
 

	
 

	
 
#==============================================================================
 
# threading.Event
 
#==============================================================================
 

	
 
if __py_version__ >= (2, 6):
 
    from threading import Event, Thread
 
else:
 
    from threading import _Verbose, Condition, Lock, Thread, _time, \
 
        _allocate_lock, RLock, _sleep
 

	
 
    def Condition(*args, **kwargs):
 
        return _Condition(*args, **kwargs)
 

	
 
    class _Condition(_Verbose):
 

	
 
        def __init__(self, lock=None, verbose=None):
 
            _Verbose.__init__(self, verbose)
 
            if lock is None:
 
                lock = RLock()
 
            self.__lock = lock
 
            # Export the lock's acquire() and release() methods
 
            self.acquire = lock.acquire
 
            self.release = lock.release
 
            # If the lock defines _release_save() and/or _acquire_restore(),
 
            # these override the default implementations (which just call
 
            # release() and acquire() on the lock).  Ditto for _is_owned().
 
            try:
 
                self._release_save = lock._release_save
 
            except AttributeError:
 
                pass
 
            try:
 
                self._acquire_restore = lock._acquire_restore
 
            except AttributeError:
 
                pass
 
            try:
 
                self._is_owned = lock._is_owned
 
            except AttributeError:
 
                pass
 
            self.__waiters = []
 

	
 
        def __enter__(self):
 
            return self.__lock.__enter__()
 

	
 
        def __exit__(self, *args):
 
            return self.__lock.__exit__(*args)
 

	
 
        def __repr__(self):
 
            return "<Condition(%s, %d)>" % (self.__lock, len(self.__waiters))
 

	
 
        def _release_save(self):
 
            self.__lock.release()           # No state to save
 

	
 
        def _acquire_restore(self, x):
 
            self.__lock.acquire()           # Ignore saved state
 

	
 
        def _is_owned(self):
 
            # Return True if lock is owned by current_thread.
 
            # This method is called only if __lock doesn't have _is_owned().
 
            if self.__lock.acquire(0):
 
                self.__lock.release()
 
                return False
 
            else:
 
                return True
 

	
 
        def wait(self, timeout=None):
 
            if not self._is_owned():
 
                raise RuntimeError("cannot wait on un-acquired lock")
 
            waiter = _allocate_lock()
 
            waiter.acquire()
 
            self.__waiters.append(waiter)
 
            saved_state = self._release_save()
 
            try:    # restore state no matter what (e.g., KeyboardInterrupt)
 
                if timeout is None:
 
                    waiter.acquire()
 
                    if __debug__:
 
                        self._note("%s.wait(): got it", self)
 
                else:
 
                    # Balancing act:  We can't afford a pure busy loop, so we
 
                    # have to sleep; but if we sleep the whole timeout time,
 
                    # we'll be unresponsive.  The scheme here sleeps very
 
                    # little at first, longer as time goes on, but never longer
 
                    # than 20 times per second (or the timeout time remaining).
 
                    endtime = _time() + timeout
 
                    delay = 0.0005 # 500 us -> initial delay of 1 ms
 
                    while True:
 
                        gotit = waiter.acquire(0)
 
                        if gotit:
 
                            break
 
                        remaining = endtime - _time()
 
                        if remaining <= 0:
 
                            break
 
                        delay = min(delay * 2, remaining, .05)
 
                        _sleep(delay)
 
                    if not gotit:
 
                        if __debug__:
 
                            self._note("%s.wait(%s): timed out", self, timeout)
 
                        try:
 
                            self.__waiters.remove(waiter)
 
                        except ValueError:
 
                            pass
 
                    else:
 
                        if __debug__:
 
                            self._note("%s.wait(%s): got it", self, timeout)
 
            finally:
 
                self._acquire_restore(saved_state)
 

	
 
        def notify(self, n=1):
 
            if not self._is_owned():
 
                raise RuntimeError("cannot notify on un-acquired lock")
 
            __waiters = self.__waiters
 
            waiters = __waiters[:n]
 
            if not waiters:
 
                if __debug__:
 
                    self._note("%s.notify(): no waiters", self)
 
                return
 
            self._note("%s.notify(): notifying %d waiter%s", self, n,
 
                       n != 1 and "s" or "")
 
            for waiter in waiters:
 
                waiter.release()
 
                try:
 
                    __waiters.remove(waiter)
 
                except ValueError:
 
                    pass
 

	
 
        def notifyAll(self):
 
            self.notify(len(self.__waiters))
 

	
 
        notify_all = notifyAll
 

	
 
    def Event(*args, **kwargs):
 
        return _Event(*args, **kwargs)
 

	
 
    class _Event(_Verbose):
 

	
 
        # After Tim Peters' event class (without is_posted())
 

	
 
        def __init__(self, verbose=None):
 
            _Verbose.__init__(self, verbose)
 
            self.__cond = Condition(Lock())
 
            self.__flag = False
 

	
 
        def isSet(self):
 
            return self.__flag
 

	
 
        is_set = isSet
 

	
 
        def set(self):
 
            self.__cond.acquire()
 
            try:
 
                self.__flag = True
 
                self.__cond.notify_all()
 
            finally:
 
                self.__cond.release()
 

	
 
        def clear(self):
 
            self.__cond.acquire()
 
            try:
 
                self.__flag = False
 
            finally:
 
                self.__cond.release()
 

	
 
        def wait(self, timeout=None):
 
            self.__cond.acquire()
 
            try:
 
                if not self.__flag:
 
                    self.__cond.wait(timeout)
 
            finally:
 
                self.__cond.release()
rhodecode/lib/middleware/pygrack.py
 
import os
 
import socket
 
import logging
 
import subprocess
 
import traceback
 

	
 
from webob import Request, Response, exc
 

	
 
import rhodecode
 
from rhodecode.lib import subprocessio
 
from rhodecode.lib.vcs import subprocessio
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class FileWrapper(object):
 

	
 
    def __init__(self, fd, content_length):
 
        self.fd = fd
 
        self.content_length = content_length
 
        self.remain = content_length
 

	
 
    def read(self, size):
 
        if size <= self.remain:
 
            try:
 
                data = self.fd.read(size)
 
            except socket.error:
 
                raise IOError(self)
 
            self.remain -= size
 
        elif self.remain:
 
            data = self.fd.read(self.remain)
 
            self.remain = 0
 
        else:
 
            data = None
 
        return data
 

	
 
    def __repr__(self):
 
        return '<FileWrapper %s len: %s, read: %s>' % (
 
            self.fd, self.content_length, self.content_length - self.remain
 
        )
 

	
 

	
 
class GitRepository(object):
 
    git_folder_signature = set(['config', 'head', 'info', 'objects', 'refs'])
 
    commands = ['git-upload-pack', 'git-receive-pack']
 

	
 
    def __init__(self, repo_name, content_path, extras):
 
        files = set([f.lower() for f in os.listdir(content_path)])
 
        if  not (self.git_folder_signature.intersection(files)
 
                == self.git_folder_signature):
 
            raise OSError('%s missing git signature' % content_path)
 
        self.content_path = content_path
 
        self.valid_accepts = ['application/x-%s-result' %
 
                              c for c in self.commands]
 
        self.repo_name = repo_name
 
        self.extras = extras
 

	
 
    def _get_fixedpath(self, path):
 
        """
rhodecode/lib/subprocessio.py
 
deleted file
rhodecode/lib/utils.py
 
@@ -329,96 +329,116 @@ def make_ui(read_from='file', path=None,
 

	
 
    if read_from == 'file':
 
        if not os.path.isfile(path):
 
            log.debug('hgrc file is not present at %s, skipping...' % path)
 
            return False
 
        log.debug('reading hgrc from %s' % path)
 
        cfg = config.config()
 
        cfg.read(path)
 
        for section in ui_sections:
 
            for k, v in cfg.items(section):
 
                log.debug('settings ui from file: [%s] %s=%s' % (section, k, v))
 
                baseui.setconfig(safe_str(section), safe_str(k), safe_str(v))
 

	
 
    elif read_from == 'db':
 
        sa = meta.Session()
 
        ret = sa.query(RhodeCodeUi)\
 
            .options(FromCache("sql_cache_short", "get_hg_ui_settings"))\
 
            .all()
 

	
 
        hg_ui = ret
 
        for ui_ in hg_ui:
 
            if ui_.ui_active:
 
                log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
 
                          ui_.ui_key, ui_.ui_value)
 
                baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
 
                                 safe_str(ui_.ui_value))
 
            if ui_.ui_key == 'push_ssl':
 
                # force set push_ssl requirement to False, rhodecode
 
                # handles that
 
                baseui.setconfig(safe_str(ui_.ui_section), safe_str(ui_.ui_key),
 
                                 False)
 
        if clear_session:
 
            meta.Session.remove()
 
    return baseui
 

	
 

	
 
def set_rhodecode_config(config):
 
    """
 
    Updates pylons config with new settings from database
 

	
 
    :param config:
 
    """
 
    hgsettings = RhodeCodeSetting.get_app_settings()
 

	
 
    for k, v in hgsettings.items():
 
        config[k] = v
 

	
 

	
 
def set_vcs_config(config):
 
    """
 
    Patch VCS config with some RhodeCode specific stuff
 

	
 
    :param config: rhodecode.CONFIG
 
    """
 
    import rhodecode
 
    from rhodecode.lib.vcs import conf
 
    from rhodecode.lib.utils2 import aslist
 
    conf.settings.BACKENDS = {
 
        'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
 
        'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
 
    }
 

	
 
    conf.settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git')
 
    conf.settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip()
 
    conf.settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding',
 
                                                        'utf8'), sep=',')
 

	
 

	
 
def map_groups(path):
 
    """
 
    Given a full path to a repository, create all nested groups that this
 
    repo is inside. This function creates parent-child relationships between
 
    groups and creates default perms for all new groups.
 

	
 
    :param path: full path to repository
 
    """
 
    sa = meta.Session()
 
    groups = path.split(Repository.url_sep())
 
    parent = None
 
    group = None
 

	
 
    # last element is repo in nested groups structure
 
    groups = groups[:-1]
 
    rgm = ReposGroupModel(sa)
 
    owner = User.get_first_admin()
 
    for lvl, group_name in enumerate(groups):
 
        group_name = '/'.join(groups[:lvl] + [group_name])
 
        group = RepoGroup.get_by_group_name(group_name)
 
        desc = '%s group' % group_name
 

	
 
        # skip folders that are now removed repos
 
        if REMOVED_REPO_PAT.match(group_name):
 
            break
 

	
 
        if group is None:
 
            log.debug('creating group level: %s group_name: %s'
 
                      % (lvl, group_name))
 
            group = RepoGroup(group_name, parent)
 
            group.group_description = desc
 
            group.user = owner
 
            sa.add(group)
 
            perm_obj = rgm._create_default_perms(group)
 
            sa.add(perm_obj)
 
            sa.flush()
 

	
 
        parent = group
 
    return group
 

	
 

	
 
def repo2db_mapper(initial_repo_list, remove_obsolete=False,
 
                   install_git_hook=False):
 
    """
 
    maps all repos given in initial_repo_list, non-existing repositories
 
    are created. If remove_obsolete is True it also checks for db entries
 
    that are not in initial_repo_list and removes them.
 

	
 
@@ -694,101 +714,103 @@ class BasePasterCommand(Command):
 
        # Decrement because we're going to lob off the first argument.
 
        # @@ This is hacky
 
        self.min_args -= 1
 
        self.bootstrap_config(args[0])
 
        self.update_parser()
 
        return super(BasePasterCommand, self).run(args[1:])
 

	
 
    def update_parser(self):
 
        """
 
        Abstract method.  Allows for the class's parser to be updated
 
        before the superclass's `run` method is called.  Necessary to
 
        allow options/arguments to be passed through to the underlying
 
        celery command.
 
        """
 
        raise NotImplementedError("Abstract Method.")
 

	
 
    def bootstrap_config(self, conf):
 
        """
 
        Loads the pylons configuration.
 
        """
 
        from pylons import config as pylonsconfig
 

	
 
        self.path_to_ini_file = os.path.realpath(conf)
 
        conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
 
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)
 

	
 
    def _init_session(self):
 
        """
 
        Inits SqlAlchemy Session
 
        """
 
        logging.config.fileConfig(self.path_to_ini_file)
 
        from pylons import config
 
        from rhodecode.model import init_model
 
        from rhodecode.lib.utils2 import engine_from_config
 

	
 
        #get to remove repos !!
 
        add_cache(config)
 
        engine = engine_from_config(config, 'sqlalchemy.db1.')
 
        init_model(engine)
 

	
 

	
 
def check_git_version():
 
    """
 
    Checks which version of git is installed on the system, and issues a warning
 
    if it's too old for RhodeCode to properly work.
 
    """
 
    from rhodecode import BACKENDS
 
    from rhodecode.lib.vcs.backends.git.repository import GitRepository
 
    from rhodecode.lib.vcs.conf import settings
 
    from distutils.version import StrictVersion
 

	
 
    stdout, stderr = GitRepository._run_git_command('--version', _bare=True,
 
                                                    _safe=True)
 

	
 
    ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
 
    if len(ver.split('.')) > 3:
 
        #StrictVersion needs to be only 3 element type
 
        ver = '.'.join(ver.split('.')[:3])
 
    try:
 
        _ver = StrictVersion(ver)
 
    except Exception:
 
        _ver = StrictVersion('0.0.0')
 
        stderr = traceback.format_exc()
 

	
 
    req_ver = '1.7.4'
 
    to_old_git = False
 
    if  _ver < StrictVersion(req_ver):
 
        to_old_git = True
 

	
 
    if 'git' in BACKENDS:
 
        log.debug('GIT version detected: %s' % stdout)
 
        log.debug('GIT executable: "%s" version detected: %s'
 
                  % (settings.GIT_EXECUTABLE_PATH, stdout))
 
        if stderr:
 
            log.warning('Unable to detect git version, org error was: %r' % stderr)
 
        elif to_old_git:
 
            log.warning('RhodeCode detected git version %s, which is too old '
 
                        'for the system to function properly. Make sure '
 
                        'its version is at least %s' % (ver, req_ver))
 
    return _ver
 

	
 

	
 
@decorator.decorator
 
def jsonify(func, *args, **kwargs):
 
    """Action decorator that formats output for JSON
 

	
 
    Given a function that will return content, this decorator will turn
 
    the result into JSON, with a content-type of 'application/json' and
 
    output it.
 

	
 
    """
 
    from pylons.decorators.util import get_pylons
 
    from rhodecode.lib.compat import json
 
    pylons = get_pylons(args)
 
    pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
 
    data = func(*args, **kwargs)
 
    if isinstance(data, (list, tuple)):
 
        msg = "JSON responses with Array envelopes are susceptible to " \
 
              "cross-site data leak attacks, see " \
 
              "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
 
        warnings.warn(msg, Warning, 2)
 
        log.warning(msg)
 
    log.debug("Returning JSON wrapped action output")
 
    return json.dumps(data, encoding='utf-8')
rhodecode/lib/vcs/__init__.py
 
# -*- coding: utf-8 -*-
 
"""
 
    vcs
 
    ~~~
 

	
 
    Various Version Control System (vcs) management abstraction layer for
 
    Python.
 

	
 
    :created_on: Apr 8, 2010
 
    :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
 
"""
 

	
 
VERSION = (0, 3, 0, 'dev')
 
VERSION = (0, 4, 0, 'dev')
 

	
 
__version__ = '.'.join((str(each) for each in VERSION[:4]))
 

	
 
__all__ = [
 
    'get_version', 'get_repo', 'get_backend',
 
    'VCSError', 'RepositoryError', 'ChangesetError']
 
    'VCSError', 'RepositoryError', 'ChangesetError'
 
]
 

	
 
import sys
 
from rhodecode.lib.vcs.backends import get_repo, get_backend
 
from rhodecode.lib.vcs.exceptions import VCSError, RepositoryError, ChangesetError
 

	
 

	
 
def get_version():
 
    """
 
    Returns shorter version (digit parts only) as string.
 
    """
 
    return '.'.join((str(each) for each in VERSION[:3]))
 

	
 

	
 
def main(argv=None):
 
    if argv is None:
 
        argv = sys.argv
 
    from rhodecode.lib.vcs.cli import ExecutionManager
 
    manager = ExecutionManager(argv)
 
    manager.execute()
 
    return 0
 

	
 
if __name__ == '__main__':
 
    sys.exit(main(sys.argv))
rhodecode/lib/vcs/backends/base.py
 
# -*- coding: utf-8 -*-
 
"""
 
    vcs.backends.base
 
    ~~~~~~~~~~~~~~~~~
 

	
 
    Base for all available scm backends
 

	
 
    :created_on: Apr 8, 2010
 
    :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
 
"""
 

	
 
import datetime
 
from itertools import chain
 
import itertools
 

	
 
from rhodecode.lib.vcs.utils import author_name, author_email
 
from rhodecode.lib.vcs.utils.lazy import LazyProperty
 
from rhodecode.lib.vcs.utils.helpers import get_dict_for_attrs
 
from rhodecode.lib.vcs.conf import settings
 

	
 
from rhodecode.lib.vcs.exceptions import ChangesetError, EmptyRepositoryError, \
 
    NodeAlreadyAddedError, NodeAlreadyChangedError, NodeAlreadyExistsError, \
 
    NodeAlreadyRemovedError, NodeDoesNotExistError, NodeNotChangedError, \
 
    RepositoryError
 
from rhodecode.lib.vcs.exceptions import (
 
    ChangesetError, EmptyRepositoryError, NodeAlreadyAddedError,
 
    NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
 
    NodeDoesNotExistError, NodeNotChangedError, RepositoryError
 
)
 

	
 

	
 
class BaseRepository(object):
 
    """
 
    Base Repository for final backends
 

	
 
    **Attributes**
 

	
 
        ``DEFAULT_BRANCH_NAME``
 
            name of default branch (i.e. "trunk" for svn, "master" for git, etc.)
 

	
 
        ``scm``
 
            alias of scm, i.e. *git* or *hg*
 

	
 
        ``repo``
 
            object from external api
 

	
 
        ``revisions``
 
            list of all available revisions' ids, in ascending order
 

	
 
        ``changesets``
 
            storage dict caching returned changesets
 

	
 
        ``path``
 
            absolute path to the repository
 

	
 
        ``branches``
 
            branches as list of changesets
 

	
 
        ``tags``
 
            tags as list of changesets
 
    """
 
    scm = None
 
    DEFAULT_BRANCH_NAME = None
 
    EMPTY_CHANGESET = '0' * 40
 

	
 
    def __init__(self, repo_path, create=False, **kwargs):
 
        """
 
        Initializes repository. Raises RepositoryError if repository could
 
        not be found at the given ``repo_path``, or if a directory at ``repo_path``
 
        exists and ``create`` is set to True.
 

	
 
        :param repo_path: local path of the repository
 
        :param create=False: if set to True, will try to create the repository.
 
        :param src_url=None: if set, should be a proper url from which the repository
 
          would be cloned; requires ``create`` parameter to be set to True -
 
          raises RepositoryError if src_url is set and create evaluates to
 
          False
 
@@ -806,97 +808,97 @@ class BaseInMemoryChangeset(object):
 
                    "as removed" % node.path)
 
        try:
 
            self.repository.get_changeset()
 
        except EmptyRepositoryError:
 
            raise EmptyRepositoryError("Nothing to change - try to *add* new "
 
                "nodes rather than changing them")
 
        for node in filenodes:
 
            if node.path in (n.path for n in self.changed):
 
                raise NodeAlreadyChangedError("Node at '%s' is already "
 
                    "marked as changed" % node.path)
 
            self.changed.append(node)
 

	
 
    def remove(self, *filenodes):
 
        """
 
        Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
 
        *removed* in next commit.
 

	
 
        :raises ``NodeAlreadyRemovedError``: if node has been already marked to
 
          be *removed*
 
        :raises ``NodeAlreadyChangedError``: if node has been already marked to
 
          be *changed*
 
        """
 
        for node in filenodes:
 
            if node.path in (n.path for n in self.removed):
 
                raise NodeAlreadyRemovedError("Node is already marked to "
 
                    "for removal at %s" % node.path)
 
            if node.path in (n.path for n in self.changed):
 
                raise NodeAlreadyChangedError("Node is already marked to "
 
                    "be changed at %s" % node.path)
 
            # We only mark node as *removed* - real removal is done by
 
            # commit method
 
            self.removed.append(node)
 

	
 
    def reset(self):
 
        """
 
        Resets this instance to initial state (cleans ``added``, ``changed``
 
        and ``removed`` lists).
 
        """
 
        self.added = []
 
        self.changed = []
 
        self.removed = []
 
        self.parents = []
 

	
 
    def get_ipaths(self):
 
        """
 
        Returns generator of paths from nodes marked as added, changed or
 
        removed.
 
        """
 
        for node in chain(self.added, self.changed, self.removed):
 
        for node in itertools.chain(self.added, self.changed, self.removed):
 
            yield node.path
 

	
 
    def get_paths(self):
 
        """
 
        Returns list of paths from nodes marked as added, changed or removed.
 
        """
 
        return list(self.get_ipaths())
 

	
 
    def check_integrity(self, parents=None):
 
        """
 
        Checks in-memory changeset's integrity. Also, sets parents if not
 
        already set.
 

	
 
        :raises CommitError: if any error occurs (i.e.
 
          ``NodeDoesNotExistError``).
 
        """
 
        if not self.parents:
 
            parents = parents or []
 
            if len(parents) == 0:
 
                try:
 
                    parents = [self.repository.get_changeset(), None]
 
                except EmptyRepositoryError:
 
                    parents = [None, None]
 
            elif len(parents) == 1:
 
                parents += [None]
 
            self.parents = parents
 

	
 
        # Local parents, only if not None
 
        parents = [p for p in self.parents if p]
 

	
 
        # Check nodes marked as added
 
        for p in parents:
 
            for node in self.added:
 
                try:
 
                    p.get_node(node.path)
 
                except NodeDoesNotExistError:
 
                    pass
 
                else:
 
                    raise NodeAlreadyExistsError("Node at %s already exists "
 
                        "at %s" % (node.path, p))
 

	
 
        # Check nodes marked as changed
 
        missing = set(self.changed)
 
        not_changed = set(self.changed)
 
        if self.changed and not parents:
 
            raise NodeDoesNotExistError(str(self.changed[0].path))
 
        for p in parents:
 
            for node in self.changed:
rhodecode/lib/vcs/backends/git/changeset.py
 
import re
 
from itertools import chain
 
from dulwich import objects
 
from subprocess import Popen, PIPE
 
import rhodecode
 

	
 
from rhodecode.lib.vcs.conf import settings
 
from rhodecode.lib.vcs.exceptions import RepositoryError
 
from rhodecode.lib.vcs.exceptions import ChangesetError
 
from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError
 
from rhodecode.lib.vcs.exceptions import VCSError
 
from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError
 
from rhodecode.lib.vcs.exceptions import ImproperArchiveTypeError
 
from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
 
from rhodecode.lib.vcs.nodes import FileNode, DirNode, NodeKind, RootNode, \
 
    RemovedFileNode, SubModuleNode, ChangedFileNodesGenerator,\
 
    AddedFileNodesGenerator, RemovedFileNodesGenerator
 
from rhodecode.lib.vcs.utils import safe_unicode
 
from rhodecode.lib.vcs.utils import date_fromtimestamp
 
from rhodecode.lib.vcs.exceptions import (
 
    RepositoryError, ChangesetError, NodeDoesNotExistError, VCSError,
 
    ChangesetDoesNotExistError, ImproperArchiveTypeError
 
)
 
from rhodecode.lib.vcs.nodes import (
 
    FileNode, DirNode, NodeKind, RootNode, RemovedFileNode, SubModuleNode,
 
    ChangedFileNodesGenerator, AddedFileNodesGenerator, RemovedFileNodesGenerator
 
)
 
from rhodecode.lib.vcs.utils import (
 
    safe_unicode, safe_str, safe_int, date_fromtimestamp
 
)
 
from rhodecode.lib.vcs.utils.lazy import LazyProperty
 
from rhodecode.lib.utils2 import safe_int, safe_str
 

	
 

	
 
class GitChangeset(BaseChangeset):
 
    """
 
    Represents state of the repository at single revision.
 
    """
 

	
 
    def __init__(self, repository, revision):
 
        self._stat_modes = {}
 
        self.repository = repository
 

	
 
        try:
 
            commit = self.repository._repo.get_object(revision)
 
            if isinstance(commit, objects.Tag):
 
                revision = commit.object[1]
 
                commit = self.repository._repo.get_object(commit.object[1])
 
        except KeyError:
 
            raise RepositoryError("Cannot get object with id %s" % revision)
 
        self.raw_id = revision
 
        self.id = self.raw_id
 
        self.short_id = self.raw_id[:12]
 
        self._commit = commit
 

	
 
        self._tree_id = commit.tree
 
        self._committer_property = 'committer'
 
        self._author_property = 'author'
 
        self._date_property = 'commit_time'
 
        self._date_tz_property = 'commit_timezone'
 
        self.revision = repository.revisions.index(revision)
 

	
 
        self.message = safe_unicode(commit.message)
 

	
 
        self.nodes = {}
 
        self._paths = {}
 

	
 
    @LazyProperty
 
    def committer(self):
 
        return safe_unicode(getattr(self._commit, self._committer_property))
 

	
 
    @LazyProperty
 
    def author(self):
 
        return safe_unicode(getattr(self._commit, self._author_property))
 

	
 
    @LazyProperty
 
    def date(self):
 
        return date_fromtimestamp(getattr(self._commit, self._date_property),
 
                                  getattr(self._commit, self._date_tz_property))
 

	
 
@@ -142,98 +141,97 @@ class GitChangeset(BaseChangeset):
 
                        raise ChangesetError('%s is not a directory' % curdir)
 
                else:
 
                    raise ChangesetError('%s have not been found' % curdir)
 

	
 
                # cache all items from the given traversed tree
 
                for item, stat, id in tree.iteritems():
 
                    if curdir:
 
                        name = '/'.join((curdir, item))
 
                    else:
 
                        name = item
 
                    self._paths[name] = id
 
                    self._stat_modes[name] = stat
 
            if not path in self._paths:
 
                raise NodeDoesNotExistError("There is no file nor directory "
 
                    "at the given path '%s' at revision %s"
 
                    % (path, self.short_id))
 
        return self._paths[path]
 

	
 
    def _get_kind(self, path):
 
        obj = self.repository._repo[self._get_id_for_path(path)]
 
        if isinstance(obj, objects.Blob):
 
            return NodeKind.FILE
 
        elif isinstance(obj, objects.Tree):
 
            return NodeKind.DIR
 

	
 
    def _get_filectx(self, path):
 
        path = self._fix_path(path)
 
        if self._get_kind(path) != NodeKind.FILE:
 
            raise ChangesetError("File does not exist for revision %s at "
 
                " '%s'" % (self.raw_id, path))
 
        return path
 

	
 
    def _get_file_nodes(self):
 
        return chain(*(t[2] for t in self.walk()))
 

	
 
    @LazyProperty
 
    def parents(self):
 
        """
 
        Returns list of parents changesets.
 
        """
 
        return [self.repository.get_changeset(parent)
 
                for parent in self._commit.parents]
 

	
 
    @LazyProperty
 
    def children(self):
 
        """
 
        Returns list of children changesets.
 
        """
 
        rev_filter = _git_path = rhodecode.CONFIG.get('git_rev_filter',
 
                                              '--all').strip()
 
        rev_filter = _git_path = settings.GIT_REV_FILTER
 
        so, se = self.repository.run_git_command(
 
            "rev-list %s --children | grep '^%s'" % (rev_filter, self.raw_id)
 
        )
 

	
 
        children = []
 
        for l in so.splitlines():
 
            childs = l.split(' ')[1:]
 
            children.extend(childs)
 
        return [self.repository.get_changeset(cs) for cs in children]
 

	
 
    def next(self, branch=None):
 

	
 
        if branch and self.branch != branch:
 
            raise VCSError('Branch option used on changeset not belonging '
 
                           'to that branch')
 

	
 
        def _next(changeset, branch):
 
            try:
 
                next_ = changeset.revision + 1
 
                next_rev = changeset.repository.revisions[next_]
 
            except IndexError:
 
                raise ChangesetDoesNotExistError
 
            cs = changeset.repository.get_changeset(next_rev)
 

	
 
            if branch and branch != cs.branch:
 
                return _next(cs, branch)
 

	
 
            return cs
 

	
 
        return _next(self, branch)
 

	
 
    def prev(self, branch=None):
 
        if branch and self.branch != branch:
 
            raise VCSError('Branch option used on changeset not belonging '
 
                           'to that branch')
 

	
 
        def _prev(changeset, branch):
 
            try:
 
                prev_ = changeset.revision - 1
 
                if prev_ < 0:
 
                    raise IndexError
 
                prev_rev = changeset.repository.revisions[prev_]
 
            except IndexError:
 
                raise ChangesetDoesNotExistError
 

	
 
            cs = changeset.repository.get_changeset(prev_rev)
 

	
 
            if branch and branch != cs.branch:
 
@@ -328,97 +326,97 @@ class GitChangeset(BaseChangeset):
 
        TODO: This function now shells out to the underlying 'git' command, which is
 
        generally not good. It should be replaced with an algorithm iterating over
 
        commits.
 
        """
 
        cmd = 'blame -l --root -r %s -- "%s"' % (self.id, path)
 
        # -l     ==> outputs long shas (and we need all 40 characters)
 
        # --root ==> doesn't put '^' character for boundaries
 
        # -r sha ==> blames for the given revision
 
        so, se = self.repository.run_git_command(cmd)
 

	
 
        for i, blame_line in enumerate(so.split('\n')[:-1]):
 
            ln_no = i + 1
 
            sha, line = re.split(r' ', blame_line, 1)
 
            yield (ln_no, sha, lambda: self.repository.get_changeset(sha), line)
 

	
 
    def fill_archive(self, stream=None, kind='tgz', prefix=None,
 
                     subrepos=False):
 
        """
 
        Fills up given stream.
 

	
 
        :param stream: file like object.
 
        :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``.
 
            Default: ``tgz``.
 
        :param prefix: name of root directory in archive.
 
            Default is repository name and changeset's raw_id joined with dash
 
            (``repo-tip.<KIND>``).
 
        :param subrepos: include subrepos in this archive.
 

	
 
        :raise ImproperArchiveTypeError: If given kind is wrong.
 
        :raise VcsError: If given stream is None
 

	
 
        """
 
        allowed_kinds = settings.ARCHIVE_SPECS.keys()
 
        if kind not in allowed_kinds:
 
            raise ImproperArchiveTypeError('Archive kind not supported use one'
 
                'of %s', allowed_kinds)
 

	
 
        if prefix is None:
 
            prefix = '%s-%s' % (self.repository.name, self.short_id)
 
        elif prefix.startswith('/'):
 
            raise VCSError("Prefix cannot start with leading slash")
 
        elif prefix.strip() == '':
 
            raise VCSError("Prefix cannot be empty")
 

	
 
        if kind == 'zip':
 
            frmt = 'zip'
 
        else:
 
            frmt = 'tar'
 
        _git_path = rhodecode.CONFIG.get('git_path', 'git')
 
        _git_path = settings.GIT_EXECUTABLE_PATH
 
        cmd = '%s archive --format=%s --prefix=%s/ %s' % (_git_path,
 
                                                frmt, prefix, self.raw_id)
 
        if kind == 'tgz':
 
            cmd += ' | gzip -9'
 
        elif kind == 'tbz2':
 
            cmd += ' | bzip2 -9'
 

	
 
        if stream is None:
 
            raise VCSError('You need to pass in a valid stream for filling'
 
                           ' with archival data')
 
        popen = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True,
 
            cwd=self.repository.path)
 

	
 
        buffer_size = 1024 * 8
 
        chunk = popen.stdout.read(buffer_size)
 
        while chunk:
 
            stream.write(chunk)
 
            chunk = popen.stdout.read(buffer_size)
 
        # Make sure all descriptors would be read
 
        popen.communicate()
 

	
 
    def get_nodes(self, path):
 
        if self._get_kind(path) != NodeKind.DIR:
 
            raise ChangesetError("Directory does not exist for revision %s at "
 
                " '%s'" % (self.revision, path))
 
        path = self._fix_path(path)
 
        id = self._get_id_for_path(path)
 
        tree = self.repository._repo[id]
 
        dirnodes = []
 
        filenodes = []
 
        als = self.repository.alias
 
        for name, stat, id in tree.iteritems():
 
            if objects.S_ISGITLINK(stat):
 
                dirnodes.append(SubModuleNode(name, url=None, changeset=id,
 
                                              alias=als))
 
                continue
 

	
 
            obj = self.repository._repo.get_object(id)
 
            if path != '':
 
                obj_path = '/'.join((path, name))
 
            else:
 
                obj_path = name
 
            if obj_path not in self._stat_modes:
 
                self._stat_modes[obj_path] = stat
 
            if isinstance(obj, objects.Tree):
 
                dirnodes.append(DirNode(obj_path, changeset=self))
 
            elif isinstance(obj, objects.Blob):
 
                filenodes.append(FileNode(obj_path, changeset=self, mode=stat))
 
@@ -427,128 +425,128 @@ class GitChangeset(BaseChangeset):
 
                                     "or Blob, is %r" % type(obj))
 
        nodes = dirnodes + filenodes
 
        for node in nodes:
 
            if not node.path in self.nodes:
 
                self.nodes[node.path] = node
 
        nodes.sort()
 
        return nodes
 

	
 
    def get_node(self, path):
 
        if isinstance(path, unicode):
 
            path = path.encode('utf-8')
 
        path = self._fix_path(path)
 
        if not path in self.nodes:
 
            try:
 
                id_ = self._get_id_for_path(path)
 
            except ChangesetError:
 
                raise NodeDoesNotExistError("Cannot find one of parents' "
 
                    "directories for a given path: %s" % path)
 

	
 
            _GL = lambda m: m and objects.S_ISGITLINK(m)
 
            if _GL(self._stat_modes.get(path)):
 
                node = SubModuleNode(path, url=None, changeset=id_,
 
                                     alias=self.repository.alias)
 
            else:
 
                obj = self.repository._repo.get_object(id_)
 

	
 
                if isinstance(obj, objects.Tree):
 
                    if path == '':
 
                        node = RootNode(changeset=self)
 
                    else:
 
                        node = DirNode(path, changeset=self)
 
                    node._tree = obj
 
                elif isinstance(obj, objects.Blob):
 
                    node = FileNode(path, changeset=self)
 
                    node._blob = obj
 
                else:
 
                    raise NodeDoesNotExistError("There is no file nor directory "
 
                        "at the given path '%s' at revision %s"
 
                        % (path, self.short_id))
 
            # cache node
 
            self.nodes[path] = node
 
        return self.nodes[path]
 

	
 
    @LazyProperty
 
    def affected_files(self):
 
        """
 
        Gets fast-accessible file changes for the given changeset
 
        """
 
        a, m, d = self._changes_cache
 
        return list(a.union(m).union(d))
 
        added, modified, deleted = self._changes_cache
 
        return list(added.union(modified).union(deleted))
 

	
 
    @LazyProperty
 
    def _diff_name_status(self):
 
        output = []
 
        for parent in self.parents:
 
            cmd = 'diff --name-status %s %s --encoding=utf8' % (parent.raw_id,
 
                                                                self.raw_id)
 
            so, se = self.repository.run_git_command(cmd)
 
            output.append(so.strip())
 
        return '\n'.join(output)
 

	
 
    @LazyProperty
 
    def _changes_cache(self):
 
        added = set()
 
        modified = set()
 
        deleted = set()
 
        _r = self.repository._repo
 

	
 
        parents = self.parents
 
        if not self.parents:
 
            parents = [EmptyChangeset()]
 
        for parent in parents:
 
            if isinstance(parent, EmptyChangeset):
 
                oid = None
 
            else:
 
                oid = _r[parent.raw_id].tree
 
            changes = _r.object_store.tree_changes(oid, _r[self.raw_id].tree)
 
            for (oldpath, newpath), (_, _), (_, _) in changes:
 
                if newpath and oldpath:
 
                    modified.add(newpath)
 
                elif newpath and not oldpath:
 
                    added.add(newpath)
 
                elif not newpath and oldpath:
 
                    deleted.add(oldpath)
 
        return added, modified, deleted
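
A minimal standalone sketch of the classification rule used above (illustrative
only, not part of this changeset); it works on plain (oldpath, newpath) pairs
such as those yielded by dulwich's tree_changes:

def _classify(pairs):
    # None on either side means the path does not exist in that tree
    added, modified, deleted = set(), set(), set()
    for oldpath, newpath in pairs:
        if oldpath and newpath:
            modified.add(newpath)
        elif newpath:
            added.add(newpath)
        else:
            deleted.add(oldpath)
    return added, modified, deleted

# _classify([(None, 'a.py'), ('b.py', 'b.py'), ('c.py', None)])
# -> (set(['a.py']), set(['b.py']), set(['c.py']))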
 

	
 
    def _get_paths_for_status(self, status):
 
        """
 
        Returns sorted list of paths for given ``status``.
 

	
 
        :param status: one of: *added*, *modified* or *deleted*
 
        """
 
        a, m, d = self._changes_cache
 
        added, modified, deleted = self._changes_cache
 
        return sorted({
 
            'added': list(a),
 
            'modified': list(m),
 
            'deleted': list(d)}[status]
 
            'added': list(added),
 
            'modified': list(modified),
 
            'deleted': list(deleted)}[status]
 
        )
 

	
 
    @LazyProperty
 
    def added(self):
 
        """
 
        Returns list of added ``FileNode`` objects.
 
        """
 
        if not self.parents:
 
            return list(self._get_file_nodes())
 
        return AddedFileNodesGenerator([n for n in
 
                                self._get_paths_for_status('added')], self)
 

	
 
    @LazyProperty
 
    def changed(self):
 
        """
 
        Returns list of modified ``FileNode`` objects.
 
        """
 
        if not self.parents:
 
            return []
 
        return ChangedFileNodesGenerator([n for n in
 
                                self._get_paths_for_status('modified')], self)
 

	
 
    @LazyProperty
 
    def removed(self):
 
        """
 
        Returns list of removed ``FileNode`` objects.
 
        """
 
        if not self.parents:
 
            return []
 
        return RemovedFileNodesGenerator([n for n in
 
                                self._get_paths_for_status('deleted')], self)
rhodecode/lib/vcs/backends/git/inmemory.py
Show inline comments
 
@@ -105,97 +105,96 @@ class GitInMemoryChangeset(BaseInMemoryC
 
            paths = node.path.split('/')
 
            tree = commit_tree
 
            trees = [tree]
 
            # Traverse deep into the forest...
 
            for path in paths:
 
                try:
 
                    obj = self.repository._repo[tree[path][1]]
 
                    if isinstance(obj, objects.Tree):
 
                        trees.append(obj)
 
                        tree = obj
 
                except KeyError:
 
                    break
 
            # Cut down the blob and all rotten trees on the way back...
 
            for path, tree in reversed(zip(paths, trees)):
 
                del tree[path]
 
                if tree:
 
                    # This tree still has elements - don't remove it or any

                    # of its parents
 
                    break
 

	
 
        object_store.add_object(commit_tree)
 

	
 
        # Create commit
 
        commit = objects.Commit()
 
        commit.tree = commit_tree.id
 
        commit.parents = [p._commit.id for p in self.parents if p]
 
        commit.author = commit.committer = safe_str(author)
 
        commit.encoding = ENCODING
 
        commit.message = safe_str(message)
 

	
 
        # Compute date
 
        if date is None:
 
            date = time.time()
 
        elif isinstance(date, datetime.datetime):
 
            date = time.mktime(date.timetuple())
 

	
 
        author_time = kwargs.pop('author_time', date)
 
        commit.commit_time = int(date)
 
        commit.author_time = int(author_time)
 
        tz = time.timezone
 
        author_tz = kwargs.pop('author_timezone', tz)
 
        commit.commit_timezone = tz
 
        commit.author_timezone = author_tz
 

	
 
        object_store.add_object(commit)
 

	
 
        ref = 'refs/heads/%s' % branch
 
        repo.refs[ref] = commit.id
 
        repo.refs.set_symbolic_ref('HEAD', ref)
 

	
 
        # Update vcs repository object & recreate dulwich repo
 
        self.repository.revisions.append(commit.id)
 
        # invalidate parsed refs after commit
 
        self.repository._parsed_refs = self.repository._get_parsed_refs()
 
        tip = self.repository.get_changeset()
 
        self.reset()
 
        return tip
 

	
 
    def _get_missing_trees(self, path, root_tree):
 
        """
 
        Creates missing ``Tree`` objects for the given path.
 

	
 
        :param path: path given as a string. It may be a path to a file node
 
          (i.e. ``foo/bar/baz.txt``) or directory path - in that case it must
 
          end with slash (i.e. ``foo/bar/``).
 
        :param root_tree: ``dulwich.objects.Tree`` object from which we start
 
          traversing (should be commit's root tree)
 
        """
 
        dirpath = posixpath.split(path)[0]
 
        dirs = dirpath.split('/')
 
        if not dirs or dirs == ['']:
 
            return []
 

	
 
        def get_tree_for_dir(tree, dirname):
 
            for name, mode, id in tree.iteritems():
 
                if name == dirname:
 
                    obj = self.repository._repo[id]
 
                    if isinstance(obj, objects.Tree):
 
                        return obj
 
                    else:
 
                        raise RepositoryError("Cannot create directory %s "
 
                        "at tree %s as path is occupied and is not a "
 
                        "Tree" % (dirname, tree))
 
            return None
 

	
 
        trees = []
 
        parent = root_tree
 
        for dirname in dirs:
 
            tree = get_tree_for_dir(parent, dirname)
 
            if tree is None:
 
                tree = objects.Tree()
 
                dirmode = 040000
 
                parent.add(dirmode, dirname, tree.id)
 
                parent = tree
 
            # Always append tree
 
            trees.append(tree)
 
        return trees
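
As the docstring above notes, only the directory part of ``path`` matters and a
pure directory path must end with a slash; a quick illustrative sketch of that
path handling (not part of the changeset):

import posixpath

posixpath.split('foo/bar/baz.txt')[0].split('/')  # ['foo', 'bar']
posixpath.split('foo/bar/')[0].split('/')         # ['foo', 'bar']
posixpath.split('baz.txt')[0].split('/')          # [''] -> no missing trees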
rhodecode/lib/vcs/backends/git/repository.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    vcs.backends.git
 
    ~~~~~~~~~~~~~~~~
 
    vcs.backends.git.repository
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    Git backend implementation.
 
    Git repository implementation.
 

	
 
    :created_on: Apr 8, 2010
 
    :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
 
"""
 

	
 
import os
 
import re
 
import time
 
import posixpath
 
import logging
 
import traceback
 
import urllib
 
import urllib2
 
from dulwich.repo import Repo, NotGitRepository
 
import logging
 
import posixpath
 
import string
 

	
 
from dulwich.objects import Tag
 
from string import Template
 
from dulwich.repo import Repo, NotGitRepository
 

	
 
import rhodecode
 
from rhodecode.lib.vcs import subprocessio
 
from rhodecode.lib.vcs.backends.base import BaseRepository, CollectionGenerator
 
from rhodecode.lib.vcs.exceptions import BranchDoesNotExistError
 
from rhodecode.lib.vcs.exceptions import ChangesetDoesNotExistError
 
from rhodecode.lib.vcs.exceptions import EmptyRepositoryError
 
from rhodecode.lib.vcs.exceptions import RepositoryError
 
from rhodecode.lib.vcs.exceptions import TagAlreadyExistError
 
from rhodecode.lib.vcs.exceptions import TagDoesNotExistError
 
from rhodecode.lib.vcs.conf import settings
 

	
 
from rhodecode.lib.vcs.exceptions import (
 
    BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError,
 
    RepositoryError, TagAlreadyExistError, TagDoesNotExistError
 
)
 
from rhodecode.lib.vcs.utils import safe_unicode, makedate, date_fromtimestamp
 
from rhodecode.lib.vcs.utils.lazy import LazyProperty, ThreadLocalLazyProperty
 
from rhodecode.lib.vcs.utils.lazy import LazyProperty
 
from rhodecode.lib.vcs.utils.ordered_dict import OrderedDict
 
from rhodecode.lib.vcs.utils.paths import abspath
 
from rhodecode.lib.vcs.utils.paths import get_user_home
 
from .workdir import GitWorkdir
 
from rhodecode.lib.vcs.utils.paths import abspath, get_user_home
 

	
 
from rhodecode.lib.vcs.utils.hgcompat import (
 
    hg_url, httpbasicauthhandler, httpdigestauthhandler
 
)
 

	
 
from .changeset import GitChangeset
 
from .config import ConfigFile
 
from .inmemory import GitInMemoryChangeset
 
from .config import ConfigFile
 
from rhodecode.lib import subprocessio
 

	
 
from .workdir import GitWorkdir
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class GitRepository(BaseRepository):
 
    """
 
    Git repository backend.
 
    """
 
    DEFAULT_BRANCH_NAME = 'master'
 
    scm = 'git'
 

	
 
    def __init__(self, repo_path, create=False, src_url=None,
 
                 update_after_clone=False, bare=False):
 

	
 
        self.path = abspath(repo_path)
 
        repo = self._get_repo(create, src_url, update_after_clone, bare)
 
        self.bare = repo.bare
 

	
 
        self._config_files = [
 
            bare and abspath(self.path, 'config')
 
                     or abspath(self.path, '.git', 'config'),
 
            abspath(get_user_home(), '.gitconfig'),
 
        ]
 

	
 
    @property
 
    def _repo(self):
 
        return Repo(self.path)
 

	
 
    @property
 
    def head(self):
 
        try:
 
            return self._repo.head()
 
        except KeyError:
 
            return None
 

	
 
    @LazyProperty
 
    def revisions(self):
 
        """
 
        Returns a list of revision ids, in ascending order. Being a lazy

        attribute allows external tools to inject shas from a cache.
 
        """
 
        return self._get_all_revisions()
 

	
 
    @classmethod
 
    def _run_git_command(cls, cmd, **opts):
 
        """
 
        Runs given ``cmd`` as git command and returns tuple
 
        (stdout, stderr).
 

	
 
        :param cmd: git command to be executed
 
        :param opts: env options to pass into Subprocess command
 
        """
 

	
 
        if '_bare' in opts:
 
            _copts = []
 
            del opts['_bare']
 
        else:
 
            _copts = ['-c', 'core.quotepath=false', ]
 
        safe_call = False
 
        if '_safe' in opts:
 
            #no exc on failure
 
            del opts['_safe']
 
            safe_call = True
 

	
 
        _str_cmd = False
 
        if isinstance(cmd, basestring):
 
            cmd = [cmd]
 
            _str_cmd = True
 

	
 
        gitenv = os.environ
 
        # need to clean fix GIT_DIR !
 
        if 'GIT_DIR' in gitenv:
 
            del gitenv['GIT_DIR']
 
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
 

	
 
        _git_path = rhodecode.CONFIG.get('git_path', 'git')
 
        _git_path = settings.GIT_EXECUTABLE_PATH
 
        cmd = [_git_path] + _copts + cmd
 
        if _str_cmd:
 
            cmd = ' '.join(cmd)
 
        try:
 
            _opts = dict(
 
                env=gitenv,
 
                shell=False,
 
            )
 
            _opts.update(opts)
 
            p = subprocessio.SubprocessIOChunker(cmd, **_opts)
 
        except (EnvironmentError, OSError), err:
 
            tb_err = ("Couldn't run git command (%s).\n"
 
                      "Original error was:%s\n" % (cmd, err))
 
            log.error(tb_err)
 
            if safe_call:
 
                return '', err
 
            else:
 
                raise RepositoryError(tb_err)
 

	
 
        return ''.join(p.output), ''.join(p.error)
 

	
 
    def run_git_command(self, cmd):
 
        opts = {}
 
        if os.path.isdir(self.path):
 
            opts['cwd'] = self.path
 
        return self._run_git_command(cmd, **opts)
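
A hedged usage sketch of the wrapper above (the repository path is
hypothetical): the command is given without the leading ``git``, and stdout and
stderr come back as plain strings.

repo = GitRepository('/path/to/repo.git')
so, se = repo.run_git_command('rev-list --all --count')
commit_count = so.strip()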
 

	
 
    @classmethod
 
    def _check_url(cls, url):
 
        """
 
        Function will check the given url and try to verify if it's a valid

        link. Sometimes it may happen that the server will issue a basic

        auth request that can cause the whole API to hang when used from python

        or other external calls.
 

	
 
        On failures it'll raise urllib2.HTTPError
 
        """
 
        from mercurial.util import url as Url
 

	
 
        # those auth handlers are patched for a python 2.6.5 bug causing

        # infinite looping when given invalid resources
 
        from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
 

	
 
        # check first if it's not a local url
 
        if os.path.isdir(url) or url.startswith('file:'):
 
            return True
 

	
 
        if('+' in url[:url.find('://')]):
 
            url = url[url.find('+') + 1:]
 

	
 
        handlers = []
 
        test_uri, authinfo = Url(url).authinfo()
 
        test_uri, authinfo = hg_url(url).authinfo()
 
        if not test_uri.endswith('info/refs'):
 
            test_uri = test_uri.rstrip('/') + '/info/refs'
 
        if authinfo:
 
            #create a password manager
 
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
 
            passmgr.add_password(*authinfo)
 

	
 
            handlers.extend((httpbasicauthhandler(passmgr),
 
                             httpdigestauthhandler(passmgr)))
 

	
 
        o = urllib2.build_opener(*handlers)
 
        o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git
 

	
 
        q = {"service": 'git-upload-pack'}
 
        qs = '?%s' % urllib.urlencode(q)
 
        cu = "%s%s" % (test_uri, qs)
 
        req = urllib2.Request(cu, None, {})
 

	
 
        try:
 
            resp = o.open(req)
 
            return resp.code == 200
 
        except Exception, e:
 
            # means it cannot be cloned
 
            raise urllib2.URLError("[%s] %s" % (url, e))
 

	
 
    def _get_repo(self, create, src_url=None, update_after_clone=False,
 
                  bare=False):
 
        if create and os.path.exists(self.path):
 
            raise RepositoryError("Location already exist")
 
        if src_url and not create:
 
            raise RepositoryError("Create should be set to True if src_url is "
 
                                  "given (clone operation creates repository)")
 
        try:
 
            if create and src_url:
 
                GitRepository._check_url(src_url)
 
                self.clone(src_url, update_after_clone, bare)
 
                return Repo(self.path)
 
            elif create:
 
                os.mkdir(self.path)
 
                if bare:
 
                    return Repo.init_bare(self.path)
 
                else:
 
                    return Repo.init(self.path)
 
            else:
 
                return self._repo
 
        except (NotGitRepository, OSError), err:
 
            raise RepositoryError(err)
 

	
 
    def _get_all_revisions(self):
 
        # we must check if this repo is not empty, since the later command

        # fails if it is. And it's cheaper to ask than to handle the subprocess

        # errors
 
        try:
 
            self._repo.head()
 
        except KeyError:
 
            return []
 
        rev_filter = _git_path = rhodecode.CONFIG.get('git_rev_filter',
 
                                                      '--all').strip()
 

	
 
        rev_filter = _git_path = settings.GIT_REV_FILTER
 
        cmd = 'rev-list %s --reverse --date-order' % (rev_filter)
 
        try:
 
            so, se = self.run_git_command(cmd)
 
        except RepositoryError:
 
            # Can be raised for empty repositories
 
            return []
 
        return so.splitlines()
 

	
 
    def _get_all_revisions2(self):
 
        #alternate implementation using dulwich
 
        includes = [x[1][0] for x in self._parsed_refs.iteritems()
 
                    if x[1][1] != 'T']
 
        return [c.commit.id for c in self._repo.get_walker(include=includes)]
 

	
 
    def _get_revision(self, revision):
 
        """
 
        For the git backend we always return an integer here. This way we ensure

        that the changeset's revision attribute will be an integer.
 
        """
 
        pattern = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
 
        is_bstr = lambda o: isinstance(o, (str, unicode))
 
        is_null = lambda o: len(o) == revision.count('0')
 

	
 
        if len(self.revisions) == 0:
 
            raise EmptyRepositoryError("There are no changesets yet")
 

	
 
        if revision in (None, '', 'tip', 'HEAD', 'head', -1):
 
            revision = self.revisions[-1]
 

	
 
        if ((is_bstr(revision) and revision.isdigit() and len(revision) < 12)
 
            or isinstance(revision, int) or is_null(revision)):
 
            try:
 
                revision = self.revisions[int(revision)]
 
            except Exception:
 
                raise ChangesetDoesNotExistError("Revision %s does not exist "
 
                    "for this repository" % (revision))
 

	
 
        elif is_bstr(revision):
 
            # get by branch/tag name
 
            _ref_revision = self._parsed_refs.get(revision)
 
            _tags_shas = self.tags.values()
 
            if _ref_revision:  # and _ref_revision[1] in ['H', 'RH', 'T']:
 
                return _ref_revision[0]
 

	
 
            # maybe it's a tag ? we don't have them in self.revisions
 
            elif revision in _tags_shas:
 
                return _tags_shas[_tags_shas.index(revision)]
 

	
 
@@ -457,101 +454,100 @@ class GitRepository(BaseRepository):
 
        at the given revision or head (most recent commit) if None given.
 
        """
 
        if isinstance(revision, GitChangeset):
 
            return revision
 
        revision = self._get_revision(revision)
 
        changeset = GitChangeset(repository=self, revision=revision)
 
        return changeset
 

	
 
    def get_changesets(self, start=None, end=None, start_date=None,
 
           end_date=None, branch_name=None, reverse=False):
 
        """
 
        Returns iterator of ``GitChangeset`` objects from start to end (both
 
        are inclusive), in ascending date order (unless ``reverse`` is set).
 

	
 
        :param start: changeset ID, as str; first returned changeset
 
        :param end: changeset ID, as str; last returned changeset
 
        :param start_date: if specified, changesets with commit date less than
 
          ``start_date`` would be filtered out from returned set
 
        :param end_date: if specified, changesets with commit date greater than
 
          ``end_date`` would be filtered out from returned set
 
        :param branch_name: if specified, changesets not reachable from given
 
          branch would be filtered out from returned set
 
        :param reverse: if ``True``, returned generator would be reversed
 
          (meaning that returned changesets would have descending date order)
 

	
 
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
 
            exist.
 
        :raise ChangesetDoesNotExistError: If changeset for given ``start`` or
 
          ``end`` could not be found.
 

	
 
        """
 
        if branch_name and branch_name not in self.branches:
 
            raise BranchDoesNotExistError("Branch '%s' not found" \
 
                                          % branch_name)
 
        # %H in the format string means (full) commit hash; initial hashes are retrieved
 
        # in ascending date order
 
        cmd_template = 'log --date-order --reverse --pretty=format:"%H"'
 
        cmd_params = {}
 
        if start_date:
 
            cmd_template += ' --since "$since"'
 
            cmd_params['since'] = start_date.strftime('%m/%d/%y %H:%M:%S')
 
        if end_date:
 
            cmd_template += ' --until "$until"'
 
            cmd_params['until'] = end_date.strftime('%m/%d/%y %H:%M:%S')
 
        if branch_name:
 
            cmd_template += ' $branch_name'
 
            cmd_params['branch_name'] = branch_name
 
        else:
 
            rev_filter = _git_path = rhodecode.CONFIG.get('git_rev_filter',
 
                                                          '--all').strip()
 
            rev_filter = _git_path = settings.GIT_REV_FILTER
 
            cmd_template += ' %s' % (rev_filter)
 

	
 
        cmd = Template(cmd_template).safe_substitute(**cmd_params)
 
        cmd = string.Template(cmd_template).safe_substitute(**cmd_params)
 
        revs = self.run_git_command(cmd)[0].splitlines()
 
        start_pos = 0
 
        end_pos = len(revs)
 
        if start:
 
            _start = self._get_revision(start)
 
            try:
 
                start_pos = revs.index(_start)
 
            except ValueError:
 
                pass
 

	
 
        if end is not None:
 
            _end = self._get_revision(end)
 
            try:
 
                end_pos = revs.index(_end)
 
            except ValueError:
 
                pass
 

	
 
        if None not in [start, end] and start_pos > end_pos:
 
            raise RepositoryError('start cannot be after end')
 

	
 
        if end_pos is not None:
 
            end_pos += 1
 

	
 
        revs = revs[start_pos:end_pos]
 
        if reverse:
 
            revs = reversed(revs)
 
        return CollectionGenerator(self, revs)
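
A usage sketch for ``get_changesets`` (assumes ``repo`` is a GitRepository and
that the branch exists): walk the commits on ``master`` made since a given
date, newest first.

import datetime

for cs in repo.get_changesets(branch_name='master',
                              start_date=datetime.datetime(2013, 1, 1),
                              reverse=True):
    print cs.raw_id, cs.author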
 

	
 
    def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False,
 
                 context=3):
 
        """
 
        Returns (git like) *diff*, as plain text. Shows changes introduced by
 
        ``rev2`` since ``rev1``.
 

	
 
        :param rev1: Entry point from which diff is shown. Can be
 
          ``self.EMPTY_CHANGESET`` - in this case, patch showing all
 
          the changes since empty state of the repository until ``rev2``
 
        :param rev2: Until which revision changes should be shown.
 
        :param ignore_whitespace: If set to ``True``, would not show whitespace
 
          changes. Defaults to ``False``.
 
        :param context: How many lines before/after changed lines should be
 
          shown. Defaults to ``3``.
 
        """
 
        flags = ['-U%s' % context, '--full-index', '--binary', '-p', '-M', '--abbrev=40']
 
        if ignore_whitespace:
 
            flags.append('-w')
 

	
 
        if hasattr(rev1, 'raw_id'):
rhodecode/lib/vcs/backends/hg/changeset.py
Show inline comments
 
import os
 
import posixpath
 

	
 
from rhodecode.lib.vcs.conf import settings
 
from rhodecode.lib.vcs.backends.base import BaseChangeset
 
from rhodecode.lib.vcs.conf import settings
 
from rhodecode.lib.vcs.exceptions import  ChangesetDoesNotExistError, \
 
    ChangesetError, ImproperArchiveTypeError, NodeDoesNotExistError, VCSError
 
from rhodecode.lib.vcs.nodes import AddedFileNodesGenerator, \
 
    ChangedFileNodesGenerator, DirNode, FileNode, NodeKind, \
 
    RemovedFileNodesGenerator, RootNode, SubModuleNode
 

	
 
from rhodecode.lib.vcs.exceptions import (
 
    ChangesetDoesNotExistError, ChangesetError, ImproperArchiveTypeError,
 
    NodeDoesNotExistError, VCSError
 
)
 
from rhodecode.lib.vcs.nodes import (
 
    AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
 
    NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode
 
)
 
from rhodecode.lib.vcs.utils import safe_str, safe_unicode, date_fromtimestamp
 
from rhodecode.lib.vcs.utils.lazy import LazyProperty
 
from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
 
from rhodecode.lib.vcs.utils.hgcompat import archival, hex
 

	
 

	
 
class MercurialChangeset(BaseChangeset):
 
    """
 
    Represents state of the repository at the single revision.
 
    """
 

	
 
    def __init__(self, repository, revision):
 
        self.repository = repository
 
        self.raw_id = revision
 
        self._ctx = repository._repo[revision]
 
        self.revision = self._ctx._rev
 
        self.nodes = {}
 

	
 
    @LazyProperty
 
    def tags(self):
 
        return map(safe_unicode, self._ctx.tags())
 

	
 
    @LazyProperty
 
    def branch(self):
 
        return  safe_unicode(self._ctx.branch())
 

	
 
    @LazyProperty
 
    def bookmarks(self):
 
        return map(safe_unicode, self._ctx.bookmarks())
 

	
 
    @LazyProperty
 
    def message(self):
 
        return safe_unicode(self._ctx.description())
 

	
 
    @LazyProperty
 
    def committer(self):
 
        return safe_unicode(self.author)
 

	
 
    @LazyProperty
 
    def author(self):
 
        return safe_unicode(self._ctx.user())
 

	
 
    @LazyProperty
 
    def date(self):
 
        return date_fromtimestamp(*self._ctx.date())
 

	
 
    @LazyProperty
 
    def _timestamp(self):
rhodecode/lib/vcs/backends/hg/repository.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    vcs.backends.hg.repository
 
    ~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
    Mercurial repository implementation.
 

	
 
    :created_on: Apr 8, 2010
 
    :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
 
"""
 

	
 
import os
 
import time
 
import datetime
 
import urllib
 
import urllib2
 
import logging
 
import datetime
 

	
 

	
 
from rhodecode.lib.vcs.backends.base import BaseRepository, CollectionGenerator
 
from .workdir import MercurialWorkdir
 
from .changeset import MercurialChangeset
 
from .inmemory import MercurialInMemoryChangeset
 
from rhodecode.lib.vcs.conf import settings
 

	
 
from rhodecode.lib.vcs.exceptions import BranchDoesNotExistError, \
 
    ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, \
 
    VCSError, TagAlreadyExistError, TagDoesNotExistError
 
from rhodecode.lib.vcs.utils import author_email, author_name, date_fromtimestamp, \
 
    makedate, safe_unicode
 
from rhodecode.lib.vcs.exceptions import (
 
    BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError,
 
    RepositoryError, VCSError, TagAlreadyExistError, TagDoesNotExistError
 
)
 
from rhodecode.lib.vcs.utils import (
 
    author_email, author_name, date_fromtimestamp, makedate, safe_unicode
 
)
 
from rhodecode.lib.vcs.utils.lazy import LazyProperty
 
from rhodecode.lib.vcs.utils.ordered_dict import OrderedDict
 
from rhodecode.lib.vcs.utils.paths import abspath
 
from rhodecode.lib.vcs.utils.hgcompat import (
 
    ui, nullid, match, patch, diffopts, clone, get_contact, pull,
 
    localrepository, RepoLookupError, Abort, RepoError, hex, scmutil, hg_url,
 
    httpbasicauthhandler, httpdigestauthhandler
 
)
 

	
 
from rhodecode.lib.vcs.utils.hgcompat import ui, nullid, match, patch, \
 
    diffopts, clone, get_contact, pull, localrepository, RepoLookupError, \
 
    Abort, RepoError, hex, scmutil
 
from .changeset import MercurialChangeset
 
from .inmemory import MercurialInMemoryChangeset
 
from .workdir import MercurialWorkdir
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class MercurialRepository(BaseRepository):
 
    """
 
    Mercurial repository backend
 
    """
 
    DEFAULT_BRANCH_NAME = 'default'
 
    scm = 'hg'
 

	
 
    def __init__(self, repo_path, create=False, baseui=None, src_url=None,
 
                 update_after_clone=False):
 
        """
 
        Raises RepositoryError if a repository could not be found at the given
 
        ``repo_path``.
 

	
 
        :param repo_path: local path of the repository
 
        :param create=False: if set to True, would try to create repository if
 
           it does not exist rather than raising exception
 
        :param baseui=None: user data
 
        :param src_url=None: would try to clone repository from given location
 
        :param update_after_clone=False: sets update of working copy after
 
          making a clone
 
        """
 

	
 
        if not isinstance(repo_path, str):
 
            raise VCSError('Mercurial backend requires repository path to '
 
                           'be instance of <str> got %s instead' %
 
                           type(repo_path))
 

	
 
        self.path = abspath(repo_path)
 
        self.baseui = baseui or ui.ui()
 
        # We've set path and ui, now we can set _repo itself
 
        self._repo = self._get_repo(create, src_url, update_after_clone)
 

	
 
    @property
 
    def _empty(self):
 
        """
 
        Checks if the repository is empty (contains no changesets)
 
        """
 
        # TODO: Following raises errors when using InMemoryChangeset...
 
        # return len(self._repo.changelog) == 0
 
        return len(self.revisions) == 0
 

	
 
    @LazyProperty
 
    def revisions(self):
 
        """
 
        Returns a list of revision ids, in ascending order. Being a lazy

        attribute allows external tools to inject shas from a cache.
 
@@ -227,111 +247,105 @@ class MercurialRepository(BaseRepository
 
    def get_diff(self, rev1, rev2, path='', ignore_whitespace=False,
 
                  context=3):
 
        """
 
        Returns (git like) *diff*, as plain text. Shows changes introduced by
 
        ``rev2`` since ``rev1``.
 

	
 
        :param rev1: Entry point from which diff is shown. Can be
 
          ``self.EMPTY_CHANGESET`` - in this case, patch showing all
 
          the changes since empty state of the repository until ``rev2``
 
        :param rev2: Until which revision changes should be shown.
 
        :param ignore_whitespace: If set to ``True``, would not show whitespace
 
          changes. Defaults to ``False``.
 
        :param context: How many lines before/after changed lines should be
 
          shown. Defaults to ``3``.
 
        """
 
        if hasattr(rev1, 'raw_id'):
 
            rev1 = getattr(rev1, 'raw_id')
 

	
 
        if hasattr(rev2, 'raw_id'):
 
            rev2 = getattr(rev2, 'raw_id')
 

	
 
        # Check if given revisions are present at repository (may raise
 
        # ChangesetDoesNotExistError)
 
        if rev1 != self.EMPTY_CHANGESET:
 
            self.get_changeset(rev1)
 
        self.get_changeset(rev2)
 
        if path:
 
            file_filter = match(self.path, '', [path])
 
        else:
 
            file_filter = None
 

	
 
        return ''.join(patch.diff(self._repo, rev1, rev2, match=file_filter,
 
                          opts=diffopts(git=True,
 
                                        ignorews=ignore_whitespace,
 
                                        context=context)))
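
A usage sketch for ``get_diff`` (assumes ``repo`` is a MercurialRepository and
that ``setup.py`` exists in it): a git-style patch of one file from the empty
state up to the current tip, ignoring whitespace.

tip = repo.get_changeset()
patch_text = repo.get_diff(repo.EMPTY_CHANGESET, tip.raw_id,
                           path='setup.py', ignore_whitespace=True)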
 

	
 
    @classmethod
 
    def _check_url(cls, url):
 
        """
 
        Function will check the given url and try to verify if it's a valid

        link. Sometimes it may happen that mercurial will issue a basic

        auth request that can cause the whole API to hang when used from python

        or other external calls.
 

	
 
        On failures it'll raise urllib2.HTTPError; otherwise it returns whether

        the url responded with code 200, or True if it's a local path
 
        """
 

	
 
        from mercurial.util import url as Url
 

	
 
        # those auth handlers are patched for a python 2.6.5 bug causing

        # infinite looping when given invalid resources
 
        from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
 

	
 
        # check first if it's not an local url
 
        if os.path.isdir(url) or url.startswith('file:'):
 
            return True
 

	
 
        if('+' in url[:url.find('://')]):
 
            url = url[url.find('+') + 1:]
 

	
 
        handlers = []
 
        test_uri, authinfo = Url(url).authinfo()
 
        test_uri, authinfo = hg_url(url).authinfo()
 

	
 
        if authinfo:
 
            #create a password manager
 
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
 
            passmgr.add_password(*authinfo)
 

	
 
            handlers.extend((httpbasicauthhandler(passmgr),
 
                             httpdigestauthhandler(passmgr)))
 

	
 
        o = urllib2.build_opener(*handlers)
 
        o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
 
                        ('Accept', 'application/mercurial-0.1')]
 

	
 
        q = {"cmd": 'between'}
 
        q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
 
        qs = '?%s' % urllib.urlencode(q)
 
        cu = "%s%s" % (test_uri, qs)
 
        req = urllib2.Request(cu, None, {})
 

	
 
        try:
 
            resp = o.open(req)
 
            return resp.code == 200
 
        except Exception, e:
 
            # means it cannot be cloned
 
            raise urllib2.URLError("[%s] %s" % (url, e))
 

	
 
    def _get_repo(self, create, src_url=None, update_after_clone=False):
 
        """
 
        Function will check for a mercurial repository in the given path and return

        a localrepo object. If there is no repository in that path it will

        raise an exception unless the ``create`` parameter is set to True - in

        that case the repository will be created and returned.

        If ``src_url`` is given, it will try to clone the repository from that

        location. Additionally it'll update the working copy according to the

        ``update_after_clone`` flag
 
        """
 

	
 
        try:
 
            if src_url:
 
                url = str(self._get_url(src_url))
 
                opts = {}
 
                if not update_after_clone:
 
                    opts.update({'noupdate': True})
 
                try:
 
                    MercurialRepository._check_url(url)
 
                    clone(self.baseui, url, self.path, **opts)
 
#                except urllib2.URLError:
 
#                    raise Abort("Got HTTP 404 error")
 
@@ -441,98 +455,100 @@ class MercurialRepository(BaseRepository
 
        Returns ``MercurialChangeset`` object representing repository's
 
        changeset at the given ``revision``.
 
        """
 
        revision = self._get_revision(revision)
 
        changeset = MercurialChangeset(repository=self, revision=revision)
 
        return changeset
 

	
 
    def get_changesets(self, start=None, end=None, start_date=None,
 
                       end_date=None, branch_name=None, reverse=False):
 
        """
 
        Returns iterator of ``MercurialChangeset`` objects from start to end
 
        (both are inclusive)
 

	
 
        :param start: None, str, int or mercurial lookup format
 
        :param end:  None, str, int or mercurial lookup format
 
        :param start_date:
 
        :param end_date:
 
        :param branch_name:
 
        :param reverse: if ``True``, return changesets in reversed order
 
        """
 

	
 
        start_raw_id = self._get_revision(start)
 
        start_pos = self.revisions.index(start_raw_id) if start else None
 
        end_raw_id = self._get_revision(end)
 
        end_pos = self.revisions.index(end_raw_id) if end else None
 

	
 
        if None not in [start, end] and start_pos > end_pos:
 
            raise RepositoryError("Start revision '%s' cannot be "
 
                                  "after end revision '%s'" % (start, end))
 

	
 
        if branch_name and branch_name not in self.allbranches.keys():
 
            raise BranchDoesNotExistError('Branch %s not found in'
 
                                  ' this repository' % branch_name)
 
        if end_pos is not None:
 
            end_pos += 1
 
        #filter branches
 
        filter_ = []
 
        if branch_name:
 
            filter_.append('branch("%s")' % (branch_name))
 

	
 
        if start_date:
 
            filter_.append('date(">%s")' % start_date)
 
        if end_date:
 
            filter_.append('date("<%s")' % end_date)
 
        if filter_:
 
            revisions = scmutil.revrange(self._repo, filter_)
 
        else:
 
            revisions = self.revisions
 
        revs = reversed(revisions[start_pos:end_pos]) if reverse else \
 
                revisions[start_pos:end_pos]
 

	
 
        revs = revisions[start_pos:end_pos]
 
        if reverse:
 
            revs = reversed(revs)
 

	
 
        return CollectionGenerator(self, revs)
 

	
 
    def pull(self, url):
 
        """
 
        Tries to pull changes from external location.
 
        """
 
        url = self._get_url(url)
 
        try:
 
            pull(self.baseui, self._repo, url)
 
        except Abort, err:
 
            # Propagate error but with vcs's type
 
            raise RepositoryError(str(err))
 

	
 
    @LazyProperty
 
    def workdir(self):
 
        """
 
        Returns ``Workdir`` instance for this repository.
 
        """
 
        return MercurialWorkdir(self)
 

	
 
    def get_config_value(self, section, name=None, config_file=None):
 
        """
 
        Returns configuration value for a given [``section``] and ``name``.
 

	
 
        :param section: Section we want to retrieve value from
 
        :param name: Name of configuration we want to retrieve
 
        :param config_file: A path to file which should be used to retrieve
 
          configuration from (might also be a list of file paths)
 
        """
 
        if config_file is None:
 
            config_file = []
 
        elif isinstance(config_file, basestring):
 
            config_file = [config_file]
 

	
 
        config = self._repo.ui
 
        for path in config_file:
 
            config.readconfig(path)
 
        return config.config(section, name)
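
A usage sketch for ``get_config_value`` (assumes ``repo`` is a
MercurialRepository; the extra hgrc path is hypothetical). This is the same
lookup ``get_user_name`` below performs.

username = repo.get_config_value('ui', 'username')
username = repo.get_config_value('ui', 'username',
                                 config_file='/etc/mercurial/hgrc')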
 

	
 
    def get_user_name(self, config_file=None):
 
        """
 
        Returns user's name from global configuration file.
 

	
 
        :param config_file: A path to file which should be used to retrieve
 
          configuration from (might also be a list of file paths)
 
        """
 
        username = self.get_config_value('ui', 'username')
rhodecode/lib/vcs/conf/settings.py
Show inline comments
 
import os
 
import tempfile
 
from rhodecode.lib.vcs.utils import aslist
 
from rhodecode.lib.vcs.utils.paths import get_user_home
 

	
 
abspath = lambda * p: os.path.abspath(os.path.join(*p))
 

	
 
VCSRC_PATH = os.environ.get('VCSRC_PATH')
 

	
 
if not VCSRC_PATH:
 
    HOME_ = get_user_home()
 
    if not HOME_:
 
        HOME_ = tempfile.gettempdir()
 

	
 
VCSRC_PATH = VCSRC_PATH or abspath(HOME_, '.vcsrc')
 
if os.path.isdir(VCSRC_PATH):
 
    VCSRC_PATH = os.path.join(VCSRC_PATH, '__init__.py')
 

	
 
# list of default encoding used in safe_unicode/safe_str methods
 
DEFAULT_ENCODINGS = aslist('utf8')
 

	
 
# path to the git executable run by the run_git_command function
 
GIT_EXECUTABLE_PATH = 'git'
 
# can also be --branches --tags
 
GIT_REV_FILTER = '--all'
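
A hedged override sketch for the setting above, narrowing revision listing to
branch and tag heads as the comment suggests (assumes the module is imported
elsewhere as ``settings``):

from rhodecode.lib.vcs.conf import settings
settings.GIT_REV_FILTER = '--branches --tags'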
 

	
 
BACKENDS = {
 
    'hg': 'vcs.backends.hg.MercurialRepository',
 
    'git': 'vcs.backends.git.GitRepository',
 
    'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
 
    'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
 
}
 

	
 
ARCHIVE_SPECS = {
 
    'tar': ('application/x-tar', '.tar'),
 
    'tbz2': ('application/x-bzip2', '.tar.bz2'),
 
    'tgz': ('application/x-gzip', '.tar.gz'),
 
    'zip': ('application/zip', '.zip'),
 
}
 

	
 
BACKENDS = {
 
    'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
 
    'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
 
}
rhodecode/lib/vcs/nodes.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
"""
 
    vcs.nodes
 
    ~~~~~~~~~
 

	
 
    Module holding everything related to vcs nodes.
 

	
 
    :created_on: Apr 8, 2010
 
    :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
 
"""
 
import os
 
import stat
 
import posixpath
 
import mimetypes
 

	
 
from pygments import lexers
 

	
 
from rhodecode.lib.vcs.backends.base import EmptyChangeset
 
from rhodecode.lib.vcs.exceptions import NodeError, RemovedFileNodeError
 
from rhodecode.lib.vcs.utils.lazy import LazyProperty
 
from rhodecode.lib.vcs.utils import safe_unicode
 
from rhodecode.lib.vcs.exceptions import NodeError
 
from rhodecode.lib.vcs.exceptions import RemovedFileNodeError
 
from rhodecode.lib.vcs.backends.base import EmptyChangeset
 

	
 

	
 
class NodeKind:
 
    SUBMODULE = -1
 
    DIR = 1
 
    FILE = 2
 

	
 

	
 
class NodeState:
 
    ADDED = u'added'
 
    CHANGED = u'changed'
 
    NOT_CHANGED = u'not changed'
 
    REMOVED = u'removed'
 

	
 

	
 
class NodeGeneratorBase(object):
 
    """
 
    Base class for removed, added and changed filenodes; it's a lazy generator

    class that will create filenodes only on iteration or call
 

	
 
    The len method doesn't need to create filenodes at all
 
    """
 

	
 
    def __init__(self, current_paths, cs):
 
        self.cs = cs
 
        self.current_paths = current_paths
 

	
 
    def __call__(self):
 
        return [n for n in self]
 

	
 
    def __getslice__(self, i, j):
 
        for p in self.current_paths[i:j]:
 
            yield self.cs.get_node(p)
 

	
 
    def __len__(self):
 
        return len(self.current_paths)
 

	
 
    def __iter__(self):
 
        for p in self.current_paths:
 
            yield self.cs.get_node(p)
 

	
 

	
 
class AddedFileNodesGenerator(NodeGeneratorBase):
 
    """
 
    Class holding Added files for current changeset
 
    """
 
    pass
 

	
 
@@ -308,97 +307,96 @@ class FileNode(Node):
 
            return self.last_changeset.message
 
        raise NodeError("Cannot retrieve message of the file without related "
 
            "changeset attribute")
 

	
 
    @LazyProperty
 
    def last_changeset(self):
 
        if self.changeset:
 
            return self.changeset.get_file_changeset(self.path)
 
        raise NodeError("Cannot retrieve last changeset of the file without "
 
            "related changeset attribute")
 

	
 
    def get_mimetype(self):
 
        """
 
        Mimetype is calculated based on the file's content. If ``_mimetype``
 
        attribute is available, it will be returned (backends which store
 
        mimetypes or can easily recognize them, should set this private
 
        attribute to indicate that type should *NOT* be calculated).
 
        """
 
        if hasattr(self, '_mimetype'):
 
            if (isinstance(self._mimetype, (tuple, list,)) and
 
                len(self._mimetype) == 2):
 
                return self._mimetype
 
            else:
 
                raise NodeError('given _mimetype attribute must be a 2 '
 
                               'element list or tuple')
 

	
 
        mtype, encoding = mimetypes.guess_type(self.name)
 

	
 
        if mtype is None:
 
            if self.is_binary:
 
                mtype = 'application/octet-stream'
 
                encoding = None
 
            else:
 
                mtype = 'text/plain'
 
                encoding = None
 
        return mtype, encoding
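
A quick sketch of the difference between ``get_mimetype`` and the ``mimetype``
property below (assumes ``node`` is a FileNode backed by a Python source file):

mtype, encoding = node.get_mimetype()   # e.g. ('text/x-python', None)
main_type = node.mimetype               # e.g. 'text/x-python'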
 

	
 
    @LazyProperty
 
    def mimetype(self):
 
        """
 
        Wrapper around the full mimetype info. It returns only the type of the

        fetched mimetype without the encoding part. Use the get_mimetype function

        to fetch the full (type, encoding) pair
 
        """
 
        return self.get_mimetype()[0]
 

	
 
    @LazyProperty
 
    def mimetype_main(self):
 
        return ['', '']
 
        return self.mimetype.split('/')[0]
 

	
 
    @LazyProperty
 
    def lexer(self):
 
        """
 
        Returns pygments' lexer class. It tries to guess the lexer from the file's

        content, name and mimetype.
 
        """
 

	
 
        try:
 
            lexer = lexers.guess_lexer_for_filename(self.name, self.content, stripnl=False)
 
        except lexers.ClassNotFound:
 
            lexer = lexers.TextLexer(stripnl=False)
 
        # returns first alias
 
        return lexer
 

	
 
    @LazyProperty
 
    def lexer_alias(self):
 
        """
 
        Returns first alias of the lexer guessed for this file.
 
        """
 
        return self.lexer.aliases[0]
 

	
 
    @LazyProperty
 
    def history(self):
 
        """
 
        Returns a list of changesets in which this file was changed
 
        """
 
        if self.changeset is None:
 
            raise NodeError('Unable to get changeset for this FileNode')
 
        return self.changeset.get_file_history(self.path)
 

	
 
    @LazyProperty
 
    def annotate(self):
 
        """
 
        Returns a list of three-element tuples with lineno, changeset and line
 
        """
 
        if self.changeset is None:
 
            raise NodeError('Unable to get changeset for this FileNode')
 
        return self.changeset.get_file_annotate(self.path)
 

	
 
    @LazyProperty
 
    def state(self):
 
        if not self.changeset:
 
            raise NodeError("Cannot check state of the node if it's not "
 
                "linked with changeset")
 
        elif self.path in (node.path for node in self.changeset.added):
 
            return NodeState.ADDED
rhodecode/lib/vcs/subprocessio.py
Show inline comments
 
new file 100644
 
'''
 
Module provides a class that wraps communication over subprocess.Popen

input, output and error streams into a meaningful, non-blocking, concurrent

stream processor exposing the output data as an iterator fit to be a

return value passed by a WSGI application to a WSGI server per PEP 3333.
 

	
 
Copyright (c) 2011  Daniel Dotsenko <dotsa@hotmail.com>
 

	
 
This file is part of git_http_backend.py Project.
 

	
 
git_http_backend.py Project is free software: you can redistribute it and/or
 
modify it under the terms of the GNU Lesser General Public License as
 
published by the Free Software Foundation, either version 2.1 of the License,
 
or (at your option) any later version.
 

	
 
git_http_backend.py Project is distributed in the hope that it will be useful,
 
but WITHOUT ANY WARRANTY; without even the implied warranty of
 
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 
GNU Lesser General Public License for more details.
 

	
 
You should have received a copy of the GNU Lesser General Public License
 
along with git_http_backend.py Project.
 
If not, see <http://www.gnu.org/licenses/>.
 
'''
 
import os
 
import subprocess
 
from rhodecode.lib.vcs.utils.compat import deque, Event, Thread, _bytes, _bytearray
 

	
 

	
 
class StreamFeeder(Thread):
 
    """
 
    Normal writing into a pipe-like object blocks once the buffer is filled.

    This thread feeds data from a file-like object into a pipe

    without blocking the main thread.

    We close the input pipe once the end of the source stream is reached.
 
    """
 
    def __init__(self, source):
 
        super(StreamFeeder, self).__init__()
 
        self.daemon = True
 
        filelike = False
 
        self.bytes = _bytes()
 
        if type(source) in (type(''), _bytes, _bytearray):  # string-like
 
            self.bytes = _bytes(source)
 
        else:  # can be either file pointer or file-like
 
            if type(source) in (int, long):  # file pointer it is
 
                ## converting file descriptor (int) stdin into file-like
 
                try:
 
                    source = os.fdopen(source, 'rb', 16384)
 
                except Exception:
 
                    pass
 
            # let's see if source is file-like by now
 
            try:
 
                filelike = source.read
 
            except Exception:
 
                pass
 
        if not filelike and not self.bytes:
 
            raise TypeError("StreamFeeder's source object must be a readable "
 
                            "file-like, a file descriptor, or a string-like.")
 
        self.source = source
 
        self.readiface, self.writeiface = os.pipe()
 

	
 
    def run(self):
 
        t = self.writeiface
 
        if self.bytes:
 
            os.write(t, self.bytes)
 
        else:
 
            s = self.source
 
            b = s.read(4096)
 
            while b:
 
                os.write(t, b)
 
                b = s.read(4096)
 
        os.close(t)
 

	
 
    @property
 
    def output(self):
 
        return self.readiface
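
A usage sketch for ``StreamFeeder``: feed an in-memory string through a pipe so
a subprocess can read it as stdin without blocking the caller (this is how
SubprocessIOChunker below wires up its input stream).

feeder = StreamFeeder('some input data\n')
feeder.start()
stdin_fd = feeder.output   # read end of the pipe, usable as Popen's stdin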
 

	
 

	
 
class InputStreamChunker(Thread):
 
    def __init__(self, source, target, buffer_size, chunk_size):
 

	
 
        super(InputStreamChunker, self).__init__()
 

	
 
        self.daemon = True  # die die die.
 

	
 
        self.source = source
 
        self.target = target
 
        self.chunk_count_max = int(buffer_size / chunk_size) + 1
 
        self.chunk_size = chunk_size
 

	
 
        self.data_added = Event()
 
        self.data_added.clear()
 

	
 
        self.keep_reading = Event()
 
        self.keep_reading.set()
 

	
 
        self.EOF = Event()
 
        self.EOF.clear()
 

	
 
        self.go = Event()
 
        self.go.set()
 

	
 
    def stop(self):
 
        self.go.clear()
 
        self.EOF.set()
 
        try:
 
            # this is not proper, but is done to force the reader thread let
 
            # go of the input because, if successful, .close() will send EOF
 
            # down the pipe.
 
            self.source.close()
 
        except:
 
            pass
 

	
 
    def run(self):
 
        s = self.source
 
        t = self.target
 
        cs = self.chunk_size
 
        ccm = self.chunk_count_max
 
        kr = self.keep_reading
 
        da = self.data_added
 
        go = self.go
 

	
 
        try:
 
            b = s.read(cs)
 
        except ValueError:
 
            b = ''
 

	
 
        while b and go.is_set():
 
            if len(t) > ccm:
 
                kr.clear()
 
                kr.wait(2)
 
#                # this only works on 2.7.x and up
 
#                if not kr.wait(10):
 
#                    raise Exception("Timed out while waiting for input to be read.")
 
                # instead we'll use this
 
                if len(t) > ccm + 3:
 
                    raise IOError("Timed out while waiting for input from subprocess.")
 
            t.append(b)
 
            da.set()
 
            b = s.read(cs)
 
        self.EOF.set()
 
        da.set()  # for cases when done but there was no input.
 

	
 

	
 
class BufferedGenerator():
 
    '''
 
    Class behaves as a non-blocking, buffered pipe reader.

    Reads chunks of data (through a thread)

    from a blocking pipe and appends them to a deque of chunks.

    Reading is halted in the thread when the maximum number of chunks is buffered.

    The .next() method may operate in blocking or non-blocking fashion: yielding

    '' if no data is ready

    to be sent, or not returning until there is some data to send.

    When we get EOF from the underlying source pipe we set a marker so that

    StopIteration is raised after the last chunk of data is yielded.
 
    '''
 

	
 
    def __init__(self, source, buffer_size=65536, chunk_size=4096,
 
                 starting_values=[], bottomless=False):
 

	
 
        if bottomless:
 
            maxlen = int(buffer_size / chunk_size)
 
        else:
 
            maxlen = None
 

	
 
        self.data = deque(starting_values, maxlen)
 

	
 
        self.worker = InputStreamChunker(source, self.data, buffer_size,
 
                                         chunk_size)
 
        if starting_values:
 
            self.worker.data_added.set()
 
        self.worker.start()
 

	
 
    ####################
 
    # Generator's methods
 
    ####################
 

	
 
    def __iter__(self):
 
        return self
 

	
 
    def next(self):
 
        while not len(self.data) and not self.worker.EOF.is_set():
 
            self.worker.data_added.clear()
 
            self.worker.data_added.wait(0.2)
 
        if len(self.data):
 
            self.worker.keep_reading.set()
 
            return _bytes(self.data.popleft())
 
        elif self.worker.EOF.is_set():
 
            raise StopIteration
 

	
 
    def throw(self, type, value=None, traceback=None):
 
        if not self.worker.EOF.is_set():
 
            raise type(value)
 

	
 
    def start(self):
 
        self.worker.start()
 

	
 
    def stop(self):
 
        self.worker.stop()
 

	
 
    def close(self):
 
        try:
 
            self.worker.stop()
 
            self.throw(GeneratorExit)
 
        except (GeneratorExit, StopIteration):
 
            pass
 

	
 
    def __del__(self):
 
        self.close()
 

	
 
    ####################
 
    # Threaded reader's infrastructure.
 
    ####################
 
    @property
 
    def input(self):
 
        return self.worker.w
 

	
 
    @property
 
    def data_added_event(self):
 
        return self.worker.data_added
 

	
 
    @property
 
    def data_added(self):
 
        return self.worker.data_added.is_set()
 

	
 
    @property
 
    def reading_paused(self):
 
        return not self.worker.keep_reading.is_set()
 

	
 
    @property
 
    def done_reading_event(self):
 
        '''
 
        Done reading does not mean that the iterator's buffer is empty.

        The iterator might be done reading from the underlying source, but the read

        chunks might still be available for serving through the .next() method.
 

	
 
        @return An Event class instance.
 
        '''
 
        return self.worker.EOF
 

	
 
    @property
 
    def done_reading(self):
 
        '''
 
        Done reading does not mean that the iterator's buffer is empty.

        The iterator might be done reading from the underlying source, but the read

        chunks might still be available for serving through the .next() method.
 

	
 
        @return A bool value.
 
        '''
 
        return self.worker.EOF.is_set()
 

	
 
    @property
 
    def length(self):
 
        '''
 
        returns int.
 

	
 
        This is the length of the queue of chunks, not the length of
 
        the combined contents in those chunks.
 

	
 
        __len__() cannot be meaningfully implemented because this
 
        reader is just flying through a bottomless pit of content and

        can only know the length of what it has already seen.
 

	
 
        If __len__() on WSGI server per PEP 3333 returns a value,
 
        the response's length will be set to that. In order not to
 
        confuse WSGI PEP3333 servers, we will not implement __len__
 
        at all.
 
        '''
 
        return len(self.data)
 

	
 
    def prepend(self, x):
 
        self.data.appendleft(x)
 

	
 
    def append(self, x):
 
        self.data.append(x)
 

	
 
    def extend(self, o):
 
        self.data.extend(o)
 

	
 
    def __getitem__(self, i):
 
        return self.data[i]
 

	
 

	
 
class SubprocessIOChunker(object):
 
    '''
 
    Processor class wrapping handling of subprocess IO.
 

	
 
    In a way, this is a "communicate()" replacement with a twist.
 

	
 
    - We are multithreaded. Writing in and reading out and err are all separate threads.
 
    - We support concurrent (in and out) stream processing.
 
    - The output is not a stream. It's a queue of read string (bytes, not unicode)
 
      chunks. The object behaves as an iterable; you can iterate over it with "for chunk in obj:".
 
    - We are non-blocking in more respects than communicate()
 
      (reading from subprocess out pauses when internal buffer is full, but
 
       does not block the parent calling code. On the flip side, reading from
 
       slow-yielding subprocess may block the iteration until data shows up. This
 
       does not block the parallel inpipe reading occurring in a parallel thread.)
 

	
 
    The purpose of the object is to allow us to wrap subprocess interactions into
 
    an iterable that can be passed to a WSGI server as the application's return
 
    value. Because of stream-processing-ability, WSGI does not have to read ALL
 
    of the subprocess's output and buffer it, before handing it to WSGI server for
 
    HTTP response. Instead, the class initializer reads just a bit of the stream
 
    to figure out if an error occurred or is likely to occur and if not, just hands the
 
    further iteration over subprocess output to the server for completion of HTTP
 
    response.
 

	
 
    The real or perceived subprocess error is trapped and raised as one of
 
    the EnvironmentError family of exceptions
 

	
 
    Example usage:
 
    #    try:
 
    #        answer = SubprocessIOChunker(
 
    #            cmd,
 
    #            input,
 
    #            buffer_size = 65536,
 
    #            chunk_size = 4096
 
    #            )
 
    #    except (EnvironmentError) as e:
 
    #        print str(e)
 
    #        raise e
 
    #
 
    #    return answer
 

	
 

	
 
    '''
 
    def __init__(self, cmd, inputstream=None, buffer_size=65536,
 
                 chunk_size=4096, starting_values=[], **kwargs):
 
        '''
 
        Initializes SubprocessIOChunker
 

	
 
        :param cmd: A Subprocess.Popen style "cmd". Can be string or array of strings
 
        :param inputstream: (Default: None) A file-like, string, or file pointer.
 
        :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes.
 
        :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller.
 
        :param starting_values: (Default: []) An array of strings to put in front of the output queue.
 
        '''
 

	
 
        if inputstream:
 
            input_streamer = StreamFeeder(inputstream)
 
            input_streamer.start()
 
            inputstream = input_streamer.output
 

	
 
        if isinstance(cmd, (list, tuple)):
 
            cmd = ' '.join(cmd)
 

	
 
        _shell = kwargs.get('shell') or True
 
        kwargs['shell'] = _shell
 
        _p = subprocess.Popen(cmd,
 
            bufsize=-1,
 
            stdin=inputstream,
 
            stdout=subprocess.PIPE,
 
            stderr=subprocess.PIPE,
 
            **kwargs
 
            )
 

	
 
        bg_out = BufferedGenerator(_p.stdout, buffer_size, chunk_size, starting_values)
 
        bg_err = BufferedGenerator(_p.stderr, 16000, 1, bottomless=True)
 

	
 
        while not bg_out.done_reading and not bg_out.reading_paused and not bg_err.length:
 
            # doing this until we reach either end of file, or end of buffer.
 
            bg_out.data_added_event.wait(1)
 
            bg_out.data_added_event.clear()
 

	
 
        # At this point it's still ambiguous whether we are done reading or
        # the buffer is just full. Either way, if there is an error (returned
        # by the ended process, or implied by the presence of output on
        # stderr) we error out. Otherwise, we are happy.
 
        _returncode = _p.poll()
 
        if _returncode or (_returncode == None and bg_err.length):
 
            try:
 
                _p.terminate()
 
            except:
 
                pass
 
            bg_out.stop()
 
            bg_err.stop()
 
            err = '%s' % ''.join(bg_err)
 
            if err:
 
                raise EnvironmentError("Subprocess exited due to an error:\n" + err)
 
            raise EnvironmentError("Subprocess exited with non 0 ret code:%s" % _returncode)
 

	
 
        self.process = _p
 
        self.output = bg_out
 
        self.error = bg_err
 

	
 
    def __iter__(self):
 
        return self
 

	
 
    def next(self):
 
        if self.process.poll():
 
            err = '%s' % ''.join(self.error)
 
            raise EnvironmentError("Subprocess exited due to an error:\n" + err)
 
        return self.output.next()
 

	
 
    def throw(self, type, value=None, traceback=None):
 
        if self.output.length or not self.output.done_reading:
 
            raise type(value)
 

	
 
    def close(self):
 
        try:
 
            self.process.terminate()
 
        except:
 
            pass
 
        try:
 
            self.output.close()
 
        except:
 
            pass
 
        try:
 
            self.error.close()
 
        except:
 
            pass
 

	
 
    def __del__(self):
 
        self.close()
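
A minimal usage sketch (not part of the changeset) of how such a chunker could be handed to a WSGI-style consumer; the command, payload and sizes below are illustrative assumptions only.

import sys

def example_app():
    try:
        chunker = SubprocessIOChunker(
            'gzip -c -',                       # any stdin->stdout filter will do
            inputstream='some payload\n' * 1000,
            buffer_size=65536,
            chunk_size=4096,
        )
    except EnvironmentError as e:
        # errors detected during the initial probe of stdout/stderr land here
        sys.stderr.write(str(e))
        raise
    # a WSGI server would iterate this return value chunk by chunk
    return chunker

if __name__ == '__main__':
    total = sum(len(chunk) for chunk in example_app())
    print 'streamed %d bytes of compressed output' % total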
rhodecode/lib/vcs/utils/__init__.py
Show inline comments
 
"""
 
This module provides some useful tools for ``vcs`` like annotate/diff html
 
output. It also includes some internal helpers.
 
"""
 
import sys
 
import time
 
import datetime
 

	
 

	
 
def makedate():
 
    lt = time.localtime()
 
    if lt[8] == 1 and time.daylight:
 
        tz = time.altzone
 
    else:
 
        tz = time.timezone
 
    return time.mktime(lt), tz
 

	
 

	
 
def aslist(obj, sep=None, strip=True):
 
    """
 
    Returns the given string, split by ``sep``, as a list; lists and tuples
    are returned unchanged and ``None`` becomes an empty list
 

	
 
    :param obj:
 
    :param sep:
 
    :param strip:
 
    """
 
    if isinstance(obj, (basestring)):
 
        lst = obj.split(sep)
 
        if strip:
 
            lst = [v.strip() for v in lst]
 
        return lst
 
    elif isinstance(obj, (list, tuple)):
 
        return obj
 
    elif obj is None:
 
        return []
 
    else:
 
        return [obj]
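
A quick illustration of the expected behaviour (inputs are made up):

# hypothetical calls showing what aslist() returns
assert aslist('hg, git', sep=',') == ['hg', 'git']   # split and stripped
assert aslist(['hg', 'git']) == ['hg', 'git']        # lists pass through
assert aslist(None) == []                            # None -> empty list
assert aslist(42) == [42]                            # anything else is wrapped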
 

	
 

	
 
def date_fromtimestamp(unixts, tzoffset=0):
 
    """
 
    Makes a local datetime object out of unix timestamp
 

	
 
    :param unixts:
 
    :param tzoffset:
 
    """
 

	
 
    return datetime.datetime.fromtimestamp(float(unixts))
 

	
 

	
 
def safe_int(val, default=None):
 
    """
 
    Returns int() of val; if val is not convertible to int, returns default
    instead
 

	
 
    :param val:
 
    :param default:
 
    """
 

	
 
    try:
 
        val = int(val)
 
    except (ValueError, TypeError):
 
        val = default
 

	
 
    return val
 

	
 

	
 
def safe_unicode(str_, from_encoding=None):
 
    """
 
    safe unicode function. Does a few tricks to turn str_ into unicode

    In case of a UnicodeDecodeError we try to return the value decoded with
    the encoding detected by the chardet library; if that fails we fall back
    to unicode with errors replaced
 

	
 
    :param str_: string to decode
 
    :rtype: unicode
 
    :returns: unicode object
 
    """
 
    from rhodecode.lib.utils2 import safe_unicode
 
    return safe_unicode(str_, from_encoding)
 

	
 
    if isinstance(str_, unicode):
 
        return str_
 

	
 
    if not from_encoding:
 
        from rhodecode.lib.vcs.conf import settings
 
        from_encoding = settings.DEFAULT_ENCODINGS
 

	
 
    if not isinstance(from_encoding, (list, tuple)):
 
        from_encoding = [from_encoding]
 

	
 
    try:
 
        return unicode(str_)
 
    except UnicodeDecodeError:
 
        pass
 

	
 
    for enc in from_encoding:
        try:
            return unicode(str_, enc)
        except UnicodeDecodeError:
            pass
 

	
 
    try:
 
        import chardet
 
        encoding = chardet.detect(str_)['encoding']
 
        if encoding is None:
 
            raise Exception()
 
        return str_.decode(encoding)
 
    except (ImportError, UnicodeDecodeError, Exception):
 
        return unicode(str_, from_encoding[0], 'replace')
 

	
 

	
 
def safe_str(unicode_, to_encoding=None):
 
    """
 
    safe str function. Does a few tricks to turn unicode_ into a string

    In case of a UnicodeEncodeError we try to return the value encoded with
    the encoding detected by the chardet library; if that fails we fall back
    to a string with errors replaced
 

	
 
    :param unicode_: unicode to encode
 
    :rtype: str
 
    :returns: str object
 
    """
 
    from rhodecode.lib.utils2 import safe_str
 
    return safe_str(unicode_, to_encoding)
 

	
 
    # if it's not basestr cast to str
 
    if not isinstance(unicode_, basestring):
 
        return str(unicode_)
 

	
 
    if isinstance(unicode_, str):
 
        return unicode_
 

	
 
    if not to_encoding:
 
        from rhodecode.lib.vcs.conf import settings
 
        to_encoding = settings.DEFAULT_ENCODINGS
 

	
 
    if not isinstance(to_encoding, (list, tuple)):
 
        to_encoding = [to_encoding]
 

	
 
    for enc in to_encoding:
        try:
            return unicode_.encode(enc)
        except UnicodeEncodeError:
            pass
 

	
 
    try:
 
        import chardet
 
        encoding = chardet.detect(unicode_)['encoding']
 
        if encoding is None:
 
            raise UnicodeEncodeError()
 

	
 
        return unicode_.encode(encoding)
 
    except (ImportError, UnicodeEncodeError):
 
        return unicode_.encode(to_encoding[0], 'replace')
 

	
 
    return safe_str
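
Both helpers above delegate to rhodecode.lib.utils2; a short sketch of the intended round trip (the byte string below is an illustrative utf-8 payload):

# illustrative round trip: bytes -> unicode -> bytes
raw = 'grzeg\xc5\xbc\xc3\xb3\xc5\x82ka'       # utf-8 encoded bytes
as_unicode = safe_unicode(raw)                # decoded via DEFAULT_ENCODINGS
assert isinstance(as_unicode, unicode)
assert isinstance(safe_str(as_unicode), str)  # encoded back to a plain str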
 

	
 

	
 
def author_email(author):
 
    """
 
    returns the email address of the given author string.
    If either of the ``<``/``>`` markers is missing, it falls back to a regex
    findall() and returns the first match or an empty string
 

	
 
    Regex taken from http://www.regular-expressions.info/email.html
 
    """
 
    import re
 
    r = author.find('>')
 
    l = author.find('<')
 

	
 
    if l == -1 or r == -1:
 
        # fallback to regex match of email out of a string
 
        email_re = re.compile(r"""[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!"""
 
                              r"""#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z"""
 
                              r"""0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]"""
 
                              r"""*[a-z0-9])?""", re.IGNORECASE)
 
        m = re.findall(email_re, author)
 
        return m[0] if m else ''
 

	
 
    return author[l + 1:r].strip()
 

	
 

	
 
def author_name(author):
 
    """
 
    get the name of the author, or else the username.
    It'll try to find an email in the author string and just cut it off
    to get the username
 
    """
 

	
 
    if not '@' in author:
 
        return author
 
    else:
 
        return author.replace(author_email(author), '').replace('<', '')\
 
            .replace('>', '').strip()
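
A short, hypothetical example of both author helpers side by side:

# the author string below is made up for illustration
line = 'Joe Doe <joe.doe@example.com>'
assert author_email(line) == 'joe.doe@example.com'
assert author_name(line) == 'Joe Doe'
# without angle brackets the email is extracted by the regex fallback
assert author_email('joe.doe@example.com') == 'joe.doe@example.com'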
rhodecode/lib/vcs/utils/annotate.py
Show inline comments
 
import StringIO

from pygments import highlight
from pygments.formatters import HtmlFormatter

from rhodecode.lib.vcs.exceptions import VCSError
from rhodecode.lib.vcs.nodes import FileNode
 

	
 

	
 
def annotate_highlight(filenode, annotate_from_changeset_func=None,
 
        order=None, headers=None, **options):
 
    """
 
    Returns html portion containing annotated table with 3 columns: line
 
    numbers, changeset information and pygmentized line of code.
 

	
 
    :param filenode: FileNode object
    :param annotate_from_changeset_func: function taking a changeset and
      returning a single annotate cell; needs a line break at the end
    :param order: ordered sequence of ``ls`` (line numbers column),
      ``annotate`` (annotate column), ``code`` (code column); default is
      ``['ls', 'annotate', 'code']``
    :param headers: dictionary with headers (keys are what's in the ``order``
      parameter)
 
    """
 
    options['linenos'] = True
 
    formatter = AnnotateHtmlFormatter(filenode=filenode, order=order,
 
        headers=headers,
 
        annotate_from_changeset_func=annotate_from_changeset_func, **options)
 
    lexer = filenode.lexer
 
    highlighted = highlight(filenode.content, lexer, formatter)
 
    return highlighted
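
A hedged sketch of calling annotate_highlight with a link-producing callback, mirroring the example in AnnotateHtmlFormatter's docstring below; the repository path, revision and file name are assumptions.

# hypothetical: render an annotated HTML table for one file of a repository
from rhodecode.lib import vcs

def changeset_to_anchor(changeset):
    return '<a href="/changesets/%s/">%s</a>\n' % (changeset.id, changeset.id)

repo = vcs.get_repo('/path/to/repo')                   # assumed repository path
filenode = repo.get_changeset().get_node('setup.py')   # assumed file name
html = annotate_highlight(
    filenode,
    annotate_from_changeset_func=changeset_to_anchor,
    headers={'ls': '#', 'annotate': 'Annotate', 'code': 'Code'})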
 

	
 

	
 
class AnnotateHtmlFormatter(HtmlFormatter):
 

	
 
    def __init__(self, filenode, annotate_from_changeset_func=None,
 
            order=None, **options):
 
        """
 
        If ``annotate_from_changeset_func`` is passed it should be a function
 
        which returns string from the given changeset. For example, we may pass
 
        following function as ``annotate_from_changeset_func``::
 

	
 
            def changeset_to_anchor(changeset):
 
                return '<a href="/changesets/%s/">%s</a>\n' %\
 
                       (changeset.id, changeset.id)
 

	
 
        :param annotate_from_changeset_func: see above
 
        :param order: (default: ``['ls', 'annotate', 'code']``); order of
 
          columns;
 
        :param options: standard pygments HtmlFormatter options; there is one
          extra option though, ``headers``. For instance we can pass::
 

	
 
             formatter = AnnotateHtmlFormatter(filenode, headers={
 
                'ls': '#',
 
                'annotate': 'Annotate',
rhodecode/lib/vcs/utils/compat.py
Show inline comments
 
"""
 
Various utilities to work with Python < 2.7.
 

	
 
Those utilities may be deleted once ``vcs`` stops supporting older Python
versions.
 
"""
 
import sys
 

	
 
import array
 

	
 
if sys.version_info >= (2, 7):
 
    unittest = __import__('unittest')
 
else:
 
    unittest = __import__('unittest2')
 

	
 

	
 
if sys.version_info >= (2, 6):
 
    _bytes = bytes
 
else:
 
    # in py2.6 bytes is a synonym for str
 
    _bytes = str
 

	
 
if sys.version_info >= (2, 6):
 
    _bytearray = bytearray
 
else:
 
    # no idea if this is correct but all integration tests are passing
 
    # i think we never use bytearray anyway
 
    _bytearray = array
 

	
 
if sys.version_info >= (2, 6):
 
    from collections import deque
 
else:
 
    #need to implement our own deque with maxlen
 
    class deque(object):
 

	
 
        def __init__(self, iterable=(), maxlen= -1):
 
            if not hasattr(self, 'data'):
 
                self.left = self.right = 0
 
                self.data = {}
 
            self.maxlen = maxlen or -1
 
            self.extend(iterable)
 

	
 
        def append(self, x):
 
            self.data[self.right] = x
 
            self.right += 1
 
            if self.maxlen != -1 and len(self) > self.maxlen:
 
                self.popleft()
 

	
 
        def appendleft(self, x):
 
            self.left -= 1
 
            self.data[self.left] = x
 
            if self.maxlen != -1 and len(self) > self.maxlen:
 
                self.pop()
 

	
 
        def pop(self):
 
            if self.left == self.right:
 
                raise IndexError('cannot pop from empty deque')
 
            self.right -= 1
 
            elem = self.data[self.right]
 
            del self.data[self.right]
 
            return elem
 

	
 
        def popleft(self):
 
            if self.left == self.right:
 
                raise IndexError('cannot pop from empty deque')
 
            elem = self.data[self.left]
 
            del self.data[self.left]
 
            self.left += 1
 
            return elem
 

	
 
        def clear(self):
 
            self.data.clear()
 
            self.left = self.right = 0
 

	
 
        def extend(self, iterable):
 
            for elem in iterable:
 
                self.append(elem)
 

	
 
        def extendleft(self, iterable):
 
            for elem in iterable:
 
                self.appendleft(elem)
 

	
 
        def rotate(self, n=1):
 
            if self:
 
                n %= len(self)
 
                for i in xrange(n):
 
                    self.appendleft(self.pop())
 

	
 
        def __getitem__(self, i):
 
            if i < 0:
 
                i += len(self)
 
            try:
 
                return self.data[i + self.left]
 
            except KeyError:
 
                raise IndexError
 

	
 
        def __setitem__(self, i, value):
 
            if i < 0:
 
                i += len(self)
 
            try:
 
                self.data[i + self.left] = value
 
            except KeyError:
 
                raise IndexError
 

	
 
        def __delitem__(self, i):
 
            size = len(self)
 
            if not (-size <= i < size):
 
                raise IndexError
 
            data = self.data
 
            if i < 0:
 
                i += size
 
            for j in xrange(self.left + i, self.right - 1):
 
                data[j] = data[j + 1]
 
            self.pop()
 

	
 
        def __len__(self):
 
            return self.right - self.left
 

	
 
        def __cmp__(self, other):
 
            if type(self) != type(other):
 
                return cmp(type(self), type(other))
 
            return cmp(list(self), list(other))
 

	
 
        def __repr__(self, _track=[]):
 
            if id(self) in _track:
 
                return '...'
 
            _track.append(id(self))
 
            r = 'deque(%r, maxlen=%s)' % (list(self), self.maxlen)
 
            _track.remove(id(self))
 
            return r
 

	
 
        def __getstate__(self):
 
            return (tuple(self),)
 

	
 
        def __setstate__(self, s):
 
            self.__init__(s[0])
 

	
 
        def __hash__(self):
 
            raise TypeError
 

	
 
        def __copy__(self):
 
            return self.__class__(self)
 

	
 
        def __deepcopy__(self, memo={}):
 
            from copy import deepcopy
 
            result = self.__class__()
 
            memo[id(self)] = result
 
            result.__init__(deepcopy(tuple(self), memo))
 
            return result
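
A brief, illustrative check of the fallback deque's maxlen behaviour (the standard collections.deque behaves the same way on Python >= 2.6):

# appending past maxlen discards elements from the opposite end
d = deque(maxlen=3)
for i in (1, 2, 3, 4):
    d.append(i)              # the oldest element (1) is popped from the left
assert list(d) == [2, 3, 4]
d.appendleft(0)              # appending on the left pops from the right
assert list(d) == [0, 2, 3]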
 

	
 

	
 
#==============================================================================
 
# threading.Event
 
#==============================================================================
 

	
 
if sys.version_info >= (2, 6):
 
    from threading import Event, Thread
 
else:
 
    from threading import _Verbose, Lock, Thread, _time, \
 
        _allocate_lock, RLock, _sleep
 

	
 
    def Condition(*args, **kwargs):
 
        return _Condition(*args, **kwargs)
 

	
 
    class _Condition(_Verbose):
 

	
 
        def __init__(self, lock=None, verbose=None):
 
            _Verbose.__init__(self, verbose)
 
            if lock is None:
 
                lock = RLock()
 
            self.__lock = lock
 
            # Export the lock's acquire() and release() methods
 
            self.acquire = lock.acquire
 
            self.release = lock.release
 
            # If the lock defines _release_save() and/or _acquire_restore(),
 
            # these override the default implementations (which just call
 
            # release() and acquire() on the lock).  Ditto for _is_owned().
 
            try:
 
                self._release_save = lock._release_save
 
            except AttributeError:
 
                pass
 
            try:
 
                self._acquire_restore = lock._acquire_restore
 
            except AttributeError:
 
                pass
 
            try:
 
                self._is_owned = lock._is_owned
 
            except AttributeError:
 
                pass
 
            self.__waiters = []
 

	
 
        def __enter__(self):
 
            return self.__lock.__enter__()
 

	
 
        def __exit__(self, *args):
 
            return self.__lock.__exit__(*args)
 

	
 
        def __repr__(self):
 
            return "<Condition(%s, %d)>" % (self.__lock, len(self.__waiters))
 

	
 
        def _release_save(self):
 
            self.__lock.release()           # No state to save
 

	
 
        def _acquire_restore(self, x):
 
            self.__lock.acquire()           # Ignore saved state
 

	
 
        def _is_owned(self):
 
            # Return True if lock is owned by current_thread.
 
            # This method is called only if __lock doesn't have _is_owned().
 
            if self.__lock.acquire(0):
 
                self.__lock.release()
 
                return False
 
            else:
 
                return True
 

	
 
        def wait(self, timeout=None):
 
            if not self._is_owned():
 
                raise RuntimeError("cannot wait on un-acquired lock")
 
            waiter = _allocate_lock()
 
            waiter.acquire()
 
            self.__waiters.append(waiter)
 
            saved_state = self._release_save()
 
            try:    # restore state no matter what (e.g., KeyboardInterrupt)
 
                if timeout is None:
 
                    waiter.acquire()
 
                    if __debug__:
 
                        self._note("%s.wait(): got it", self)
 
                else:
 
                    # Balancing act:  We can't afford a pure busy loop, so we
 
                    # have to sleep; but if we sleep the whole timeout time,
 
                    # we'll be unresponsive.  The scheme here sleeps very
 
                    # little at first, longer as time goes on, but never longer
 
                    # than 20 times per second (or the timeout time remaining).
 
                    endtime = _time() + timeout
 
                    delay = 0.0005 # 500 us -> initial delay of 1 ms
 
                    while True:
 
                        gotit = waiter.acquire(0)
 
                        if gotit:
 
                            break
 
                        remaining = endtime - _time()
 
                        if remaining <= 0:
 
                            break
 
                        delay = min(delay * 2, remaining, .05)
 
                        _sleep(delay)
 
                    if not gotit:
 
                        if __debug__:
 
                            self._note("%s.wait(%s): timed out", self, timeout)
 
                        try:
 
                            self.__waiters.remove(waiter)
 
                        except ValueError:
 
                            pass
 
                    else:
 
                        if __debug__:
 
                            self._note("%s.wait(%s): got it", self, timeout)
 
            finally:
 
                self._acquire_restore(saved_state)
 

	
 
        def notify(self, n=1):
 
            if not self._is_owned():
 
                raise RuntimeError("cannot notify on un-acquired lock")
 
            __waiters = self.__waiters
 
            waiters = __waiters[:n]
 
            if not waiters:
 
                if __debug__:
 
                    self._note("%s.notify(): no waiters", self)
 
                return
 
            self._note("%s.notify(): notifying %d waiter%s", self, n,
 
                       n != 1 and "s" or "")
 
            for waiter in waiters:
 
                waiter.release()
 
                try:
 
                    __waiters.remove(waiter)
 
                except ValueError:
 
                    pass
 

	
 
        def notifyAll(self):
 
            self.notify(len(self.__waiters))
 

	
 
        notify_all = notifyAll
 

	
 
    def Event(*args, **kwargs):
 
        return _Event(*args, **kwargs)
 

	
 
    class _Event(_Verbose):
 

	
 
        # After Tim Peters' event class (without is_posted())
 

	
 
        def __init__(self, verbose=None):
 
            _Verbose.__init__(self, verbose)
 
            self.__cond = Condition(Lock())
 
            self.__flag = False
 

	
 
        def isSet(self):
 
            return self.__flag
 

	
 
        is_set = isSet
 

	
 
        def set(self):
 
            self.__cond.acquire()
 
            try:
 
                self.__flag = True
 
                self.__cond.notify_all()
 
            finally:
 
                self.__cond.release()
 

	
 
        def clear(self):
 
            self.__cond.acquire()
 
            try:
 
                self.__flag = False
 
            finally:
 
                self.__cond.release()
 

	
 
        def wait(self, timeout=None):
 
            self.__cond.acquire()
 
            try:
 
                if not self.__flag:
 
                    self.__cond.wait(timeout)
 
            finally:
 
                self.__cond.release()
rhodecode/lib/vcs/utils/diffs.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# original copyright: 2007-2008 by Armin Ronacher
 
# licensed under the BSD license.
 

	
 
import re
 
import difflib
 
import logging
 
import itertools
 

	
 
from difflib import unified_diff
 
from itertools import tee, imap
 

	
 
from mercurial.match import match
 

	
 
from rhodecode.lib.vcs.exceptions import VCSError
 
from rhodecode.lib.vcs.nodes import FileNode, NodeError
 
from rhodecode.lib.vcs.utils import safe_unicode
 

	
 

	
 
def get_udiff(filenode_old, filenode_new, show_whitespace=True):
 
    """
 
    Returns unified diff between given ``filenode_old`` and ``filenode_new``.
 
    """
 
    try:
 
        filenode_old_date = filenode_old.changeset.date
 
    except NodeError:
 
        filenode_old_date = None
 

	
 
    try:
 
        filenode_new_date = filenode_new.changeset.date
 
    except NodeError:
 
        filenode_new_date = None
 

	
 
    for filenode in (filenode_old, filenode_new):
 
        if not isinstance(filenode, FileNode):
 
            raise VCSError("Given object should be FileNode object, not %s"
 
                % filenode.__class__)
 

	
 
    if filenode_old_date and filenode_new_date:
 
        if not filenode_old_date < filenode_new_date:
 
            logging.debug("Generating udiff for filenodes with not increasing "
 
                "dates")
 

	
 
    vcs_udiff = unified_diff(filenode_old.content.splitlines(True),
 
                               filenode_new.content.splitlines(True),
 
                               filenode_old.name,
 
                               filenode_new.name,
 
                               filenode_old_date,
 
                               filenode_old_date)
 
    return vcs_udiff
 

	
 

	
 
def get_gitdiff(filenode_old, filenode_new, ignore_whitespace=True):
 
    """
 
    Returns git style diff between given ``filenode_old`` and ``filenode_new``.
 

	
 
    :param ignore_whitespace: ignore whitespaces in diff
 
    """
 

	
 
    for filenode in (filenode_old, filenode_new):
 
        if not isinstance(filenode, FileNode):
 
            raise VCSError("Given object should be FileNode object, not %s"
 
                % filenode.__class__)
 

	
 
    old_raw_id = getattr(filenode_old.changeset, 'raw_id', '0' * 40)
 
    new_raw_id = getattr(filenode_new.changeset, 'raw_id', '0' * 40)
 

	
 
    repo = filenode_new.changeset.repository
 
    vcs_gitdiff = repo.get_diff(old_raw_id, new_raw_id, filenode_new.path,
                                ignore_whitespace)
 

	
 
    return vcs_gitdiff
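
A hedged sketch of producing both diff flavours for one file across two changesets; the repository path, revisions and file name are assumptions.

# hypothetical: diff the same file between two changesets of a repository
from rhodecode.lib import vcs

repo = vcs.get_repo('/path/to/repo')                      # assumed path
old = repo.get_changeset(0).get_node('setup.py')          # assumed revisions
new = repo.get_changeset(1).get_node('setup.py')          # and file name

udiff_lines = list(get_udiff(old, new))                   # unified diff lines
gitdiff = get_gitdiff(old, new, ignore_whitespace=False)  # git-style raw diff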
 

	
 

	
 
class DiffProcessor(object):
 
    """
 
    Give it a unified diff and it returns a list of the files that were
 
    mentioned in the diff together with a dict of meta information that
 
    can be used to render it in a HTML template.
 
    """
 
    _chunk_re = re.compile(r'@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@(.*)')
 

	
 
    def __init__(self, diff, differ='diff', format='udiff'):
 
        """
 
        :param diff:   a text in diff format or generator
 
        :param format: format of diff passed, `udiff` or `gitdiff`
 
        """
 
        if isinstance(diff, basestring):
 
            diff = [diff]
 

	
 
        self.__udiff = diff
 
        self.__format = format
 
        self.adds = 0
 
        self.removes = 0
 

	
 
        if isinstance(self.__udiff, basestring):
 
            self.lines = iter(self.__udiff.splitlines(1))
 

	
 
        elif self.__format == 'gitdiff':
 
            udiff_copy = self.copy_iterator()
 
            self.lines = itertools.imap(self.escaper,
                                        self._parse_gitdiff(udiff_copy))
 
        else:
 
            udiff_copy = self.copy_iterator()
 
            self.lines = itertools.imap(self.escaper, udiff_copy)
 

	
 
        # Select a differ.
 
        if differ == 'difflib':
 
            self.differ = self._highlight_line_difflib
 
        else:
 
            self.differ = self._highlight_line_udiff
 

	
 
    def escaper(self, string):
 
        return string.replace('<', '&lt;').replace('>', '&gt;')
 

	
 
    def copy_iterator(self):
 
        """
 
        make a fresh copy of the generator; we should not iterate through
        the original as it's needed for repeating operations on
        this instance of DiffProcessor
 
        """
 
        self.__udiff, iterator_copy = itertools.tee(self.__udiff)
 
        return iterator_copy
 

	
 
    def _extract_rev(self, line1, line2):
 
        """
 
        Extract the filename and revision hint from a line.
 
        """
 

	
 
        try:
 
            if line1.startswith('--- ') and line2.startswith('+++ '):
 
                l1 = line1[4:].split(None, 1)
 
                old_filename = l1[0].lstrip('a/') if len(l1) >= 1 else None
 
                old_rev = l1[1] if len(l1) == 2 else 'old'
 

	
 
                l2 = line2[4:].split(None, 1)
 
                new_filename = l2[0].lstrip('b/') if len(l1) >= 1 else None
 
                new_rev = l2[1] if len(l2) == 2 else 'new'
 

	
 
                filename = old_filename if (old_filename !=
 
                                            'dev/null') else new_filename
 

	
 
                return filename, new_rev, old_rev
 
        except (ValueError, IndexError):
 
            pass
 

	
 
        return None, None, None
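
What _extract_rev yields for a typical '---'/'+++' header pair (illustrative input; the method is private, so this is only a sketch of its behaviour):

# illustrative: file name and revision hints pulled from a diff header pair
dp = DiffProcessor('')        # the diff body is irrelevant for this helper
fname, new_rev, old_rev = dp._extract_rev('--- a/setup.py old',
                                          '+++ b/setup.py new')
assert (fname, new_rev, old_rev) == ('setup.py', 'new', 'old')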
 

	
 
    def _parse_gitdiff(self, diffiterator):
 
        def line_decoder(l):
 
            if l.startswith('+') and not l.startswith('+++'):
 
                self.adds += 1
 
            elif l.startswith('-') and not l.startswith('---'):
 
                self.removes += 1
 
            return safe_unicode(l)
 

	
 
        output = list(diffiterator)
 
        size = len(output)
 

	
 
        if size == 2:
 
            l = []
 
            l.extend([output[0]])
 
            l.extend(output[1].splitlines(1))
 
            return map(line_decoder, l)
 
        elif size == 1:
 
            return  map(line_decoder, output[0].splitlines(1))
 
        elif size == 0:
 
            return []
 

	
 
        raise Exception('wrong size of diff %s' % size)
rhodecode/lib/vcs/utils/helpers.py
Show inline comments
 
"""
 
Utilities aimed to help achieve mostly basic tasks.
 
"""
 
from __future__ import division
 

	
 
import re
 
import os
 
import time
 
import datetime
 
import os.path
 
from subprocess import Popen, PIPE
 

	
 
from rhodecode.lib.vcs.exceptions import VCSError
 
from rhodecode.lib.vcs.exceptions import RepositoryError
 
from rhodecode.lib.vcs.utils.paths import abspath
 

	
 
ALIASES = ['hg', 'git']
 

	
 

	
 
def get_scm(path, search_up=False, explicit_alias=None):
 
    """
 
    Returns one of the aliases from ``ALIASES`` (in the same order of
    precedence as the shortcuts given in ``ALIASES``) and the top working dir
    path for the given argument. If no scm-specific directory is found, or
    more than one scm is found at that directory, ``VCSError`` is raised.

    :param search_up: if set to ``True``, this function will try to
      move up to the parent directory every time no scm is recognized for the
      currently checked path. Default: ``False``.
    :param explicit_alias: can be one of the available backend aliases; when
      given, it is returned for repositories under more than one version
      control system. If ``explicit_alias`` differs from what was found,
      ``VCSError`` is raised.
 
    """
 
    if not os.path.isdir(path):
 
        raise VCSError("Given path %s is not a directory" % path)
 

	
 
    def get_scms(path):
 
        return [(scm, path) for scm in get_scms_for_path(path)]
 

	
 
    found_scms = get_scms(path)
 
    while not found_scms and search_up:
 
        newpath = abspath(path, '..')
 
        if newpath == path:
 
            break
 
        path = newpath
 
        found_scms = get_scms(path)
 

	
 
    if len(found_scms) > 1:
 
        for scm in found_scms:
 
            if scm[0] == explicit_alias:
 
                return scm
 
        raise VCSError('More than one [%s] scm found at given path %s'
 
                       % (','.join((x[0] for x in found_scms)), path))
 

	
 
    if len(found_scms) == 0:
 
        raise VCSError('No scm found at given path %s' % path)
 

	
 
    return found_scms[0]
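
A hedged usage sketch; the path below is an assumption:

# hypothetical: detect which backend manages a working directory
try:
    alias, topmost_dir = get_scm('/srv/repos/myproject', search_up=True)
    print 'found %s repository at %s' % (alias, topmost_dir)
except VCSError as e:
    print 'not under supported version control: %s' % e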
 

	
 
@@ -118,96 +119,97 @@ def get_highlighted_code(name, code, typ
 
        pygments
 
    except ImportError:
 
        return code
 
    from pygments import highlight
 
    from pygments.lexers import guess_lexer_for_filename, ClassNotFound
 
    from pygments.formatters import TerminalFormatter
 

	
 
    try:
 
        lexer = guess_lexer_for_filename(name, code)
 
        formatter = TerminalFormatter()
 
        content = highlight(code, lexer, formatter)
 
    except ClassNotFound:
 
        logging.debug("Couldn't guess Lexer, will not use pygments.")
 
        content = code
 
    return content
 

	
 

	
 
def parse_changesets(text):
 
    """
 
    Returns dictionary with *start*, *main* and *end* ids.
 

	
 
    Examples::
 

	
 
        >>> parse_changesets('aaabbb')
 
        {'start': None, 'main': 'aaabbb', 'end': None}
 
        >>> parse_changesets('aaabbb..cccddd')
 
        {'start': 'aaabbb', 'main': None, 'end': 'cccddd'}
 

	
 
    """
 
    text = text.strip()
 
    CID_RE = r'[a-zA-Z0-9]+'
 
    if not '..' in text:
 
        m = re.match(r'^(?P<cid>%s)$' % CID_RE, text)
 
        if m:
 
            return {
 
                'start': None,
 
                'main': text,
 
                'end': None,
 
            }
 
    else:
 
        RE = r'^(?P<start>%s)?\.{2,3}(?P<end>%s)?$' % (CID_RE, CID_RE)
 
        m = re.match(RE, text)
 
        if m:
 
            result = m.groupdict()
 
            result['main'] = None
 
            return result
 
    raise ValueError("IDs not recognized")
 

	
 

	
 
def parse_datetime(text):
 
    """
 
    Parses given text and returns ``datetime.datetime`` instance or raises
 
    ``ValueError``.
 

	
 
    :param text: string of desired date/datetime or something more verbose,
 
      like *yesterday*, *2weeks 3days*, etc.
 
    """
 

	
 
    text = text.strip().lower()
 

	
 
    INPUT_FORMATS = (
 
        '%Y-%m-%d %H:%M:%S',
 
        '%Y-%m-%d %H:%M',
 
        '%Y-%m-%d',
 
        '%m/%d/%Y %H:%M:%S',
 
        '%m/%d/%Y %H:%M',
 
        '%m/%d/%Y',
 
        '%m/%d/%y %H:%M:%S',
 
        '%m/%d/%y %H:%M',
 
        '%m/%d/%y',
 
    )
 
    for format in INPUT_FORMATS:
 
        try:
 
            return datetime.datetime(*time.strptime(text, format)[:6])
 
        except ValueError:
 
            pass
 

	
 
    # Try descriptive texts
 
    if text == 'tomorrow':
 
        future = datetime.datetime.now() + datetime.timedelta(days=1)
 
        args = future.timetuple()[:3] + (23, 59, 59)
 
        return datetime.datetime(*args)
 
    elif text == 'today':
 
        return datetime.datetime(*datetime.datetime.today().timetuple()[:3])
 
    elif text == 'now':
 
        return datetime.datetime.now()
 
    elif text == 'yesterday':
 
        past = datetime.datetime.now() - datetime.timedelta(days=1)
 
        return datetime.datetime(*past.timetuple()[:3])
 
    else:
 
        days = 0
 
        matched = re.match(
 
            r'^((?P<weeks>\d+) ?w(eeks?)?)? ?((?P<days>\d+) ?d(ays?)?)?$', text)
 
        if matched:
 
            groupdict = matched.groupdict()
 
            if groupdict['days']:
 
                days += int(matched.groupdict()['days'])
rhodecode/lib/vcs/utils/hgcompat.py
Show inline comments
 
"""
 
Mercurial libs compatibility
 
"""
 

	
 
from mercurial import archival, merge as hg_merge, patch, ui
 
from mercurial.commands import clone, nullid, pull
 
from mercurial.context import memctx, memfilectx
 
from mercurial.error import RepoError, RepoLookupError, Abort
 
from mercurial.hgweb.common import get_contact
 
from mercurial.localrepo import localrepository
 
from mercurial.match import match
 
from mercurial.mdiff import diffopts
 
from mercurial.node import hex
 
from mercurial.encoding import tolocal
 
from mercurial import discovery
 
from mercurial import localrepo
 
from mercurial import scmutil
 
from mercurial.discovery import findcommonoutgoing
 

	
 
from mercurial.util import url as hg_url
 

	
 
# those auth handlers are patched for a python 2.6.5 bug causing
# infinite looping when given invalid resources
 
from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
rhodecode/lib/vcs/utils/progressbar.py
Show inline comments
 
# encoding: UTF-8
 
import sys
 
import datetime
 
import string
 

	
 
from rhodecode.lib.vcs.utils.filesize import filesizeformat
 
from rhodecode.lib.vcs.utils.helpers import get_total_seconds
 

	
 

	
 
class ProgressBarError(Exception):
 
    pass
 

	
 
class AlreadyFinishedError(ProgressBarError):
 
    pass
 

	
 

	
 
class ProgressBar(object):
 

	
 
    default_elements = ['percentage', 'bar', 'steps']
 

	
 
    def __init__(self, steps=100, stream=None, elements=None):
 
        self.step = 0
 
        self.steps = steps
 
        self.stream = stream or sys.stderr
 
        self.bar_char = '='
 
        self.width = 50
 
        self.separator = ' | '
 
        self.elements = elements or self.default_elements
 
        self.started = None
 
        self.finished = False
 
        self.steps_label = 'Step'
 
        self.time_label = 'Time'
 
        self.eta_label = 'ETA'
 
        self.speed_label = 'Speed'
 
        self.transfer_label = 'Transfer'
 

	
 
    def __str__(self):
 
        return self.get_line()
 

	
 
    def __iter__(self):
 
        start = self.step
 
        end = self.steps + 1
 
        for x in xrange(start, end):
 
            self.render(x)
 
            yield x
 

	
 
    def get_separator(self):
 
        return self.separator
 

	
 
    def get_bar_char(self):
 
        return self.bar_char
 

	
 
    def get_bar(self):
 
        char = self.get_bar_char()
 
        perc = self.get_percentage()
 
        length = int(self.width * perc / 100)
 
        bar = char * length
 
        bar = bar.ljust(self.width)
 
        return bar
 

	
 
    def get_elements(self):
 
        return self.elements
 

	
 
    def get_template(self):
 
        separator = self.get_separator()
 
        elements = self.get_elements()
 
        return string.Template(separator.join((('$%s' % e) for e in elements)))
 

	
 
    def get_total_time(self, current_time=None):
 
        if current_time is None:
 
            current_time = datetime.datetime.now()
 
        if not self.started:
 
            return datetime.timedelta()
 
        return current_time - self.started
 

	
 
    def get_rendered_total_time(self):
 
        delta = self.get_total_time()
 
        if not delta:
 
            ttime = '-'
 
        else:
 
            ttime = str(delta)
 
        return '%s %s' % (self.time_label, ttime)
 

	
 
    def get_eta(self, current_time=None):
 
        if current_time is None:
 
            current_time = datetime.datetime.now()
 
        if self.step == 0:
 
            return datetime.timedelta()
 
        total_seconds = get_total_seconds(self.get_total_time())
 
        eta_seconds = total_seconds * self.steps / self.step - total_seconds
 
        return datetime.timedelta(seconds=int(eta_seconds))
 

	
 
    def get_rendered_eta(self):
 
        eta = self.get_eta()
 
        if not eta:
 
            eta = '--:--:--'
 
        else:
 
            eta = str(eta).rjust(8)
 
        return '%s: %s' % (self.eta_label, eta)
 

	
 
    def get_percentage(self):
 
        return float(self.step) / self.steps * 100
 

	
 
    def get_rendered_percentage(self):
 
        perc = self.get_percentage()
 
        return ('%s%%' % (int(perc))).rjust(5)
 

	
 
    def get_rendered_steps(self):
 
        return '%s: %s/%s' % (self.steps_label, self.step, self.steps)
 

	
 
    def get_rendered_speed(self, step=None, total_seconds=None):
 
        if step is None:
 
            step = self.step
 
        if total_seconds is None:
 
            total_seconds = get_total_seconds(self.get_total_time())
rhodecode/tests/__init__.py
Show inline comments
 
"""Pylons application test package
 

	
 
This package assumes the Pylons environment is already loaded, such as
 
when this script is imported from the `nosetests --with-pylons=test.ini`
 
command.
 

	
 
This module initializes the application via ``websetup`` (`paster
 
setup-app`) and provides the base testing objects.
 

	
 
nosetests -x - fail on first error
 
nosetests rhodecode.tests.functional.test_admin_settings:TestSettingsController.test_my_account
 
nosetests --pdb --pdb-failures
 
nosetests --with-coverage --cover-package=rhodecode.model.validators rhodecode.tests.test_validators
 

	
 
optional FLAGS:
 
    RC_WHOOSH_TEST_DISABLE=1 - skip whoosh index building and tests
 
    RC_NO_TMP_PATH=1 - disable new temp path for tests, used mostly for test_vcs_operations
 

	
 
"""
 
import os
 
import time
 
import logging
 
import datetime
 
import hashlib
 
import tempfile
 
from os.path import join as jn
 

	
 
from unittest import TestCase
 
from tempfile import _RandomNameSequence
 

	
 
from paste.deploy import loadapp
 
from paste.script.appinstall import SetupCommand
 

	
 
import pylons
 
import pylons.test
 
from pylons import config, url
 
from pylons.i18n.translation import _get_translator
 

	
 
from routes.util import URLGenerator
 
from webtest import TestApp
 
from nose.plugins.skip import SkipTest
 

	
 
from rhodecode import is_windows
 
from rhodecode.model.meta import Session
 
from rhodecode.model.db import User
 
from rhodecode.tests.nose_parametrized import parameterized
 

	
 
import pylons.test
 
from rhodecode.lib.utils2 import safe_unicode, safe_str
 

	
 

	
 
os.environ['TZ'] = 'UTC'
 
if not is_windows:
 
    time.tzset()
 

	
 
log = logging.getLogger(__name__)
 

	
 
__all__ = [
 
    'parameterized', 'environ', 'url', 'get_new_dir', 'TestController',
 
    'SkipTest', 'ldap_lib_installed',
 
    'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'NEW_HG_REPO', 'NEW_GIT_REPO',
 
    'HG_FORK', 'GIT_FORK', 'TEST_USER_ADMIN_LOGIN', 'TEST_USER_ADMIN_PASS',
 
    'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS',
 
    'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN',
 
    'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO',
 
    'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO',
 
    'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'HG_REMOTE_REPO',
 
    'GIT_REMOTE_REPO', 'SCM_TESTS',
 
]
 

	
 
# Invoke websetup with the current config file
 
# SetupCommand('setup-app').run([config_file])
 

	
 
environ = {}
 

	
 
#SOME GLOBALS FOR TESTS
 

	
 
TESTS_TMP_PATH = jn('/', 'tmp', 'rc_test_%s' % _RandomNameSequence().next())
 
TEST_USER_ADMIN_LOGIN = 'test_admin'
 
TEST_USER_ADMIN_PASS = 'test12'
 
TEST_USER_ADMIN_EMAIL = 'test_admin@mail.com'
 

	
 
TEST_USER_REGULAR_LOGIN = 'test_regular'
 
TEST_USER_REGULAR_PASS = 'test12'
 
TEST_USER_REGULAR_EMAIL = 'test_regular@mail.com'
 

	
 
TEST_USER_REGULAR2_LOGIN = 'test_regular2'
 
TEST_USER_REGULAR2_PASS = 'test12'
 
TEST_USER_REGULAR2_EMAIL = 'test_regular2@mail.com'
 

	
 
HG_REPO = 'vcs_test_hg'
 
GIT_REPO = 'vcs_test_git'
 

	
 
NEW_HG_REPO = 'vcs_test_hg_new'
 
NEW_GIT_REPO = 'vcs_test_git_new'
 

	
 
@@ -104,81 +107,87 @@ TEST_GIT_REPO_PULL = jn(TESTS_TMP_PATH, 
 

	
 

	
 
HG_REMOTE_REPO = 'http://bitbucket.org/marcinkuzminski/vcs'
 

	
 
TEST_HG_REPO = jn(TESTS_TMP_PATH, HG_REPO)
 
TEST_HG_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcshgclone%s' % uniq_suffix)
 
TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, 'vcshgpull%s' % uniq_suffix)
 

	
 
TEST_DIR = tempfile.gettempdir()
 
TEST_REPO_PREFIX = 'vcs-test'
 

	
 
# cached repos if any !
 
# comment out to get some other repos from bb or github
 
GIT_REMOTE_REPO = jn(TESTS_TMP_PATH, GIT_REPO)
 
HG_REMOTE_REPO = jn(TESTS_TMP_PATH, HG_REPO)
 

	
 
#skip ldap tests if LDAP lib is not installed
 
ldap_lib_installed = False
 
try:
 
    import ldap
 
    ldap_lib_installed = True
 
except ImportError:
 
    # means that python-ldap is not installed
 
    pass
 

	
 

	
 
def get_new_dir(title):
 
    """
 
    Always returns a new directory path.
 
    """
 
    from rhodecode.tests.vcs.utils import get_normalized_path
 
    name = TEST_REPO_PREFIX
 
    if title:
 
        name = '-'.join((name, title))
 
    hex = hashlib.sha1(str(time.time())).hexdigest()
 
    name = '-'.join((name, hex))
 
    path = os.path.join(TEST_DIR, name)
 
    return get_normalized_path(path)
 

	
 

	
 
class TestController(TestCase):
 

	
 
    def __init__(self, *args, **kwargs):
 
        wsgiapp = pylons.test.pylonsapp
 
        config = wsgiapp.config
 

	
 
        self.app = TestApp(wsgiapp)
 
        url._push_object(URLGenerator(config['routes.map'], environ))
 
        self.Session = Session
 
        pylons.app_globals._push_object(config['pylons.app_globals'])
 
        pylons.config._push_object(config)
 

	
 
        # Initialize a translator for tests that utilize i18n
 
        translator = _get_translator(pylons.config.get('lang'))
 
        pylons.translator._push_object(translator)
 

	
 
        self.index_location = config['app_conf']['index_dir']
 
        TestCase.__init__(self, *args, **kwargs)
 

	
 
    def log_user(self, username=TEST_USER_ADMIN_LOGIN,
 
                 password=TEST_USER_ADMIN_PASS):
 
        self._logged_username = username
 
        response = self.app.post(url(controller='login', action='index'),
 
                                 {'username': username,
 
                                  'password': password})
 

	
 
        if 'invalid user name' in response.body:
 
            self.fail('could not login using %s %s' % (username, password))
 

	
 
        self.assertEqual(response.status, '302 Found')
 
        ses = response.session['rhodecode_user']
 
        self.assertEqual(ses.get('username'), username)
 
        response = response.follow()
 
        self.assertEqual(ses.get('is_authenticated'), True)
 

	
 
        return response.session['rhodecode_user']
 

	
 
    def _get_logged_user(self):
 
        return User.get_by_username(self._logged_username)
 

	
 
    def checkSessionFlash(self, response, msg):
 
        self.assertTrue('flash' in response.session,
 
                        msg='Response session:%r have no flash'
 
                        % response.session)
 
        if not msg in response.session['flash'][0][1]:
 
            msg = u'msg `%s` not found in session flash: got `%s` instead' % (
 
                      msg, response.session['flash'][0][1])
 
            self.fail(safe_str(msg))
rhodecode/tests/functional/test_admin_notifications.py
Show inline comments
 
from rhodecode.tests import *
 
from rhodecode.model.db import Notification, User
 

	
 
from rhodecode.model.user import UserModel
 
from rhodecode.model.notification import NotificationModel
 
from rhodecode.model.meta import Session
 

	
 

	
 
class TestNotificationsController(TestController):
 

	
 
    def tearDown(self):
 
        for n in Notification.query().all():
 
            inst = Notification.get(n.notification_id)
 
            Session().delete(inst)
        Session().commit()
 

	
 
    def test_index(self):
 
        self.log_user()
 

	
 
        u1 = UserModel().create_or_update(username='u1', password='qweqwe',
 
                                               email='u1@rhodecode.org',
 
                                               firstname='u1', lastname='u1')
 
        u1 = u1.user_id
 

	
 
        response = self.app.get(url('notifications'))
 
        response.mustcontain('<div class="table">No notifications here yet</div>')
 

	
 
        cur_user = self._get_logged_user()
 

	
 
        NotificationModel().create(created_by=u1, subject=u'test_notification_1',
 
                                   body=u'notification_1',
 
                                   recipients=[cur_user])
 
        Session().commit()
 
        response = self.app.get(url('notifications'))
 
        response.mustcontain(u'test_notification_1')
 

	
 
#    def test_index_as_xml(self):
 
#        response = self.app.get(url('formatted_notifications', format='xml'))
 
#
 
#    def test_create(self):
 
#        response = self.app.post(url('notifications'))
 
#
 
#    def test_new(self):
 
#        response = self.app.get(url('new_notification'))
 
#
 
#    def test_new_as_xml(self):
 
#        response = self.app.get(url('formatted_new_notification', format='xml'))
 
#
 
#    def test_update(self):
 
#        response = self.app.put(url('notification', notification_id=1))
 
#
 
#    def test_update_browser_fakeout(self):
 
#        response = self.app.post(url('notification', notification_id=1), params=dict(_method='put'))
 

	
 
    def test_delete(self):
 
        self.log_user()
 
        cur_user = self._get_logged_user()
 

	
 
        u1 = UserModel().create_or_update(username='u1', password='qweqwe',
 
                                               email='u1@rhodecode.org',
 
                                               firstname='u1', lastname='u1')
 
        u2 = UserModel().create_or_update(username='u2', password='qweqwe',
 
                                               email='u2@rhodecode.org',
 
                                               firstname='u2', lastname='u2')
 

	
 
        # make notifications
 
        notification = NotificationModel().create(created_by=cur_user,
 
                                                  subject=u'test',
 
                                                  body=u'hi there',
 
                                                  recipients=[cur_user, u1, u2])
 
        Session().commit()
 
        u1 = User.get(u1.user_id)
 
        u2 = User.get(u2.user_id)
 

	
 
        # check DB
 
        get_notif = lambda un: [x.notification for x in un]
 
        self.assertEqual(get_notif(cur_user.notifications), [notification])
 
        self.assertEqual(get_notif(u1.notifications), [notification])
 
        self.assertEqual(get_notif(u2.notifications), [notification])
 
        cur_usr_id = cur_user.user_id
 

	
 
        response = self.app.delete(url('notification',
 
                                       notification_id=
 
                                       notification.notification_id))
 
        self.assertEqual(response.body, 'ok')
 

	
 
        cur_user = User.get(cur_usr_id)
 
        self.assertEqual(cur_user.notifications, [])
 

	
 
    def test_show(self):
 
        self.log_user()
 
        cur_user = self._get_logged_user()
 
        u1 = UserModel().create_or_update(username='u1', password='qweqwe',
 
                                               email='u1@rhodecode.org',
 
                                               firstname='u1', lastname='u1')
 
        u2 = UserModel().create_or_update(username='u2', password='qweqwe',
 
                                               email='u2@rhodecode.org',
 
                                               firstname='u2', lastname='u2')
 

	
 
        notification = NotificationModel().create(created_by=cur_user,
 
                                                  subject=u'test',
 
                                                  body=u'hi there',
 
                                                  recipients=[cur_user, u1, u2])
 

	
 
        response = self.app.get(url('notification',
 
                                    notification_id=notification.notification_id))
rhodecode/tests/functional/test_admin_repos.py
Show inline comments
 
@@ -2,353 +2,353 @@
 

	
 
import os
 
import urllib
 

	
 
from rhodecode.lib import vcs
 
from rhodecode.model.db import Repository, RepoGroup, UserRepoToPerm, User,\
 
    Permission
 
from rhodecode.tests import *
 
from rhodecode.model.repos_group import ReposGroupModel
 
from rhodecode.model.repo import RepoModel
 
from rhodecode.model.meta import Session
 
from rhodecode.tests.fixture import Fixture
 

	
 
fixture = Fixture()
 

	
 

	
 
def _get_permission_for_user(user, repo):
 
    perm = UserRepoToPerm.query()\
 
                .filter(UserRepoToPerm.repository ==
 
                        Repository.get_by_repo_name(repo))\
 
                .filter(UserRepoToPerm.user == User.get_by_username(user))\
 
                .all()
 
    return perm
 

	
 

	
 
class TestAdminReposController(TestController):
 

	
 
    def test_index(self):
 
        self.log_user()
 
        response = self.app.get(url('repos'))
 
        # Test response...
 

	
 
    def test_index_as_xml(self):
 
        response = self.app.get(url('formatted_repos', format='xml'))
 

	
 
    def test_create_hg(self):
 
        self.log_user()
 
        repo_name = NEW_HG_REPO
 
        description = 'description for newly created repo'
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_description=description))
 
        self.checkSessionFlash(response,
 
                               'Created repository <a href="/%s">%s</a>'
 
                               % (repo_name, repo_name))
 

	
 
        #test if the repo was created in the database
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name).one()
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name)
 
        self.assertEqual(new_repo.description, description)
 

	
 
        #test if repository is visible in the list ?
 
        response = response.follow()
 

	
 
        response.mustcontain(repo_name)
 

	
 
        #test if repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name))
 
        except Exception:
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
    def test_create_hg_non_ascii(self):
 
        self.log_user()
 
        non_ascii = "ąęł"
 
        repo_name = "%s%s" % (NEW_HG_REPO, non_ascii)
 
        repo_name_unicode = repo_name.decode('utf8')
 
        description = 'description for newly created repo' + non_ascii
 
        description_unicode = description.decode('utf8')
 
        private = False
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_description=description))
 
        self.checkSessionFlash(response,
 
                               u'Created repository <a href="/%s">%s</a>'
 
                               % (urllib.quote(repo_name), repo_name_unicode))
 
        #test if the repo was created in the database
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name_unicode).one()
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name_unicode)
 
        self.assertEqual(new_repo.description, description_unicode)
 

	
 
        #test if repository is visible in the list ?
 
        response = response.follow()
 

	
 
        response.mustcontain(repo_name)
 

	
 
        #test if repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name))
 
        except Exception:
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
    def test_create_hg_in_group(self):
 
        self.log_user()
 

	
 
        ## create GROUP
 
        group_name = 'sometest'
 
        gr = ReposGroupModel().create(group_name=group_name,
 
                                      group_description='test',
 
                                      owner=TEST_USER_ADMIN_LOGIN)
 
        Session().commit()
 

	
 
        repo_name = 'ingroup'
 
        repo_name_full = RepoGroup.url_sep().join([group_name, repo_name])
 
        description = 'description for newly created repo'
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_description=description,
 
                                                repo_group=gr.group_id,))
 

	
 
        self.checkSessionFlash(response,
 
                               'Created repository <a href="/%s">%s</a>'
 
                               % (repo_name_full, repo_name))
 
        #test if the repo was created in the database
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name_full).one()
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name_full)
 
        self.assertEqual(new_repo.description, description)
 

	
 
        #test if repository is visible in the list ?
 
        response = response.follow()
 

	
 
        response.mustcontain(repo_name_full)
 

	
 
        #test if repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name_full))
 
        except Exception:
 
            ReposGroupModel().delete(group_name)
 
            Session().commit()
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
        RepoModel().delete(repo_name_full)
 
        ReposGroupModel().delete(group_name)
 
        Session().commit()
 

	
 
    def test_create_git(self):
 
        self.log_user()
 
        repo_name = NEW_GIT_REPO
 
        description = 'description for newly created repo'
 

	
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_type='git',
 
                                                repo_name=repo_name,
 
                                                repo_description=description))
 
        self.checkSessionFlash(response,
 
                               'Created repository <a href="/%s">%s</a>'
 
                               % (repo_name, repo_name))
 

	
 
        #test if the repo was created in the database
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name).one()
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name)
 
        self.assertEqual(new_repo.description, description)
 

	
 
        #test if the repository is visible in the list
 
        response = response.follow()
 

	
 
        response.mustcontain(repo_name)
 

	
 
        #test if repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name))
 
        except Exception:
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
    def test_create_git_non_ascii(self):
 
        self.log_user()
 
        non_ascii = "ąęł"
 
        repo_name = "%s%s" % (NEW_GIT_REPO, non_ascii)
 
        repo_name_unicode = repo_name.decode('utf8')
 
        description = 'description for newly created repo' + non_ascii
 
        description_unicode = description.decode('utf8')
 
        private = False
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_type='git',
 
                                                repo_name=repo_name,
 
                                                repo_description=description))
 

	
 
        self.checkSessionFlash(response,
 
                               u'Created repository <a href="/%s">%s</a>'
 
                               % (urllib.quote(repo_name), repo_name_unicode))
 

	
 
        #test if the repo was created in the database
 
        new_repo = self.Session().query(Repository)\
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name_unicode).one()
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name_unicode)
 
        self.assertEqual(new_repo.description, description_unicode)
 

	
 
        #test if the repository is visible in the list
 
        response = response.follow()
 

	
 
        response.mustcontain(repo_name)
 

	
 
        #test if repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name))
 
        except Exception:
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
    def test_update(self):
 
        response = self.app.put(url('repo', repo_name=HG_REPO))
 

	
 
    def test_update_browser_fakeout(self):
 
        response = self.app.post(url('repo', repo_name=HG_REPO),
 
                                 params=dict(_method='put'))
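The two *_browser_fakeout tests post a form with a `_method` field because plain HTML forms can only issue GET and POST, and the Pylons/Routes stack these controllers run under translates that field into the real verb. The snippet below is only an illustrative stand-in for that convention, not RhodeCode's implementation: a tiny WSGI app that applies the same override so it can be poked with WebTest.

# Illustrative sketch only: emulates the `_method` override convention the
# browser-fakeout tests exercise; the real translation is done by Pylons/Routes.
from webtest import TestApp
try:
    from urlparse import parse_qs        # Python 2
except ImportError:
    from urllib.parse import parse_qs    # Python 3

def method_override_app(environ, start_response):
    method = environ['REQUEST_METHOD']
    if method == 'POST':
        length = int(environ.get('CONTENT_LENGTH') or 0)
        body = environ['wsgi.input'].read(length).decode('utf-8')
        override = parse_qs(body).get('_method')
        if override:
            method = override[0].upper()
    start_response('200 OK', [('Content-Type', 'text/plain; charset=utf-8')])
    return [('effective method: %s' % method).encode('utf-8')]

app = TestApp(method_override_app)
resp = app.post('/repo', params=dict(_method='put'))
assert 'effective method: PUT' in resp.text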
 

	
 
    def test_delete_hg(self):
 
        self.log_user()
 
        repo_name = 'vcs_test_new_to_delete'
 
        description = 'description for newly created repo'
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_type='hg',
 
                                                repo_name=repo_name,
 
                                                repo_description=description))
 

	
 
        self.checkSessionFlash(response,
 
                               'Created repository <a href="/%s">%s</a>'
 
                               % (repo_name, repo_name))
 
        #test if the repo was created in the database
 
        new_repo = self.Session().query(Repository)\
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name).one()
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name)
 
        self.assertEqual(new_repo.description, description)
 

	
 
        #test if the repository is visible in the list
 
        response = response.follow()
 

	
 
        response.mustcontain(repo_name)
 

	
 
        #test if repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name))
 
        except Exception:
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
        response = self.app.delete(url('repo', repo_name=repo_name))
 

	
 
        self.checkSessionFlash(response, 'Deleted repository %s' % (repo_name))
 

	
 
        response.follow()
 

	
 
        #check if repo was deleted from db
 
        deleted_repo = self.Session().query(Repository)\
 
        deleted_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name).scalar()
 

	
 
        self.assertEqual(deleted_repo, None)
 

	
 
        self.assertEqual(os.path.isdir(os.path.join(TESTS_TMP_PATH, repo_name)),
 
                                  False)
 

	
 
    def test_delete_git(self):
 
        self.log_user()
 
        repo_name = 'vcs_test_new_to_delete'
 
        description = 'description for newly created repo'
 
        private = False
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_type='git',
 
                                                repo_name=repo_name,
 
                                                repo_description=description))
 

	
 
        self.checkSessionFlash(response,
 
                               'Created repository <a href="/%s">%s</a>'
 
                               % (repo_name, repo_name))
 
        #test if the repo was created in the database
 
        new_repo = self.Session().query(Repository)\
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name).one()
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name)
 
        self.assertEqual(new_repo.description, description)
 

	
 
        #test if the repository is visible in the list
 
        response = response.follow()
 

	
 
        response.mustcontain(repo_name)
 

	
 
        #test if repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name))
 
        except Exception:
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
        response = self.app.delete(url('repo', repo_name=repo_name))
 

	
 
        self.checkSessionFlash(response, 'Deleted repository %s' % (repo_name))
 

	
 
        response.follow()
 

	
 
        #check if repo was deleted from db
 
        deleted_repo = self.Session().query(Repository)\
 
        deleted_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name).scalar()
 

	
 
        self.assertEqual(deleted_repo, None)
 

	
 
        self.assertEqual(os.path.isdir(os.path.join(TESTS_TMP_PATH, repo_name)),
 
                                  False)
 

	
 
    def test_delete_repo_with_group(self):
 
        #TODO:
 
        pass
 

	
 
    def test_delete_browser_fakeout(self):
 
        response = self.app.post(url('repo', repo_name=HG_REPO),
 
                                 params=dict(_method='delete'))
 

	
 
    def test_show_hg(self):
 
        self.log_user()
 
        response = self.app.get(url('repo', repo_name=HG_REPO))
 

	
 
    def test_show_git(self):
 
        self.log_user()
 
        response = self.app.get(url('repo', repo_name=GIT_REPO))
 

	
 

	
 
    def test_edit(self):
 
        response = self.app.get(url('edit_repo', repo_name=HG_REPO))
 

	
 
    def test_set_private_flag_sets_default_to_none(self):
 
        self.log_user()
 
        #initially repository perm should be read
 
        perm = _get_permission_for_user(user='default', repo=HG_REPO)
 
        self.assertEqual(len(perm), 1)
 
        self.assertEqual(perm[0].permission.permission_name, 'repository.read')
 
        self.assertEqual(Repository.get_by_repo_name(HG_REPO).private, False)
 

	
 
        response = self.app.put(url('repo', repo_name=HG_REPO),
 
                        fixture._get_repo_create_params(repo_private=1,
 
                                                repo_name=HG_REPO,
 
                                                user=TEST_USER_ADMIN_LOGIN))
 
        self.checkSessionFlash(response,
 
                               msg='Repository %s updated successfully' % (HG_REPO))
 
        self.assertEqual(Repository.get_by_repo_name(HG_REPO).private, True)
 

	
 
        #now the repo default permission should be None
 
        perm = _get_permission_for_user(user='default', repo=HG_REPO)
 
        self.assertEqual(len(perm), 1)
 
        self.assertEqual(perm[0].permission.permission_name, 'repository.none')
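A pattern repeated throughout this changeset is replacing the tests' `self.Session()` / `self.Session.query(...)` calls with the `Session` imported from rhodecode.model.meta. The sketch below is a minimal, self-contained illustration of why both call styles reach the same thread-local session, assuming `Session` is backed by a SQLAlchemy scoped_session; the engine and model here are stand-ins, not the real rhodecode.model.db classes.

# Minimal stand-alone sketch; assumes only SQLAlchemy.
from sqlalchemy import create_engine, Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker

engine = create_engine('sqlite://')
Session = scoped_session(sessionmaker(bind=engine))
Base = declarative_base()

class Repository(Base):
    # stand-in model, not rhodecode.model.db.Repository
    __tablename__ = 'repositories'
    repo_id = Column(Integer, primary_key=True)
    repo_name = Column(String(255))

Base.metadata.create_all(engine)

# Session() hands back the thread-local session, and the scoped_session
# registry also proxies session methods, so Session.query(...) and
# Session().query(...) operate on the same underlying session.
Session().add(Repository(repo_name='vcs_test_hg'))
Session().commit()
repo = Session.query(Repository).filter(
    Repository.repo_name == 'vcs_test_hg').one()
assert repo.repo_name == 'vcs_test_hg'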
 

	
rhodecode/tests/functional/test_admin_settings.py
 
# -*- coding: utf-8 -*-
 

	
 
from rhodecode.lib.auth import get_crypt_password, check_password
 
from rhodecode.model.db import User, RhodeCodeSetting, Repository
 
from rhodecode.tests import *
 
from rhodecode.lib import helpers as h
 
from rhodecode.model.user import UserModel
 
from rhodecode.model.scm import ScmModel
 
from rhodecode.model.meta import Session
 

	
 

	
 
class TestAdminSettingsController(TestController):
 

	
 
    def test_index(self):
 
        response = self.app.get(url('admin_settings'))
 
        # Test response...
 

	
 
    def test_index_as_xml(self):
 
        response = self.app.get(url('formatted_admin_settings', format='xml'))
 

	
 
    def test_create(self):
 
        response = self.app.post(url('admin_settings'))
 

	
 
    def test_new(self):
 
        response = self.app.get(url('admin_new_setting'))
 

	
 
    def test_new_as_xml(self):
 
        response = self.app.get(url('formatted_admin_new_setting', format='xml'))
 

	
 
    def test_update(self):
 
        response = self.app.put(url('admin_setting', setting_id=1))
 

	
 
    def test_update_browser_fakeout(self):
 
        response = self.app.post(url('admin_setting', setting_id=1), params=dict(_method='put'))
 

	
 
    def test_delete(self):
 
        response = self.app.delete(url('admin_setting', setting_id=1))
 

	
 
    def test_delete_browser_fakeout(self):
 
        response = self.app.post(url('admin_setting', setting_id=1), params=dict(_method='delete'))
 

	
 
    def test_show(self):
 
        response = self.app.get(url('admin_setting', setting_id=1))
 

	
 
    def test_show_as_xml(self):
 
        response = self.app.get(url('formatted_admin_setting', setting_id=1, format='xml'))
 

	
 
    def test_edit(self):
 
        response = self.app.get(url('admin_edit_setting', setting_id=1))
 

	
 
    def test_edit_as_xml(self):
 
        response = self.app.get(url('formatted_admin_edit_setting',
 
                                    setting_id=1, format='xml'))
 

	
 
    def test_ga_code_active(self):
 
        self.log_user()
 
        old_title = 'RhodeCode'
 
@@ -90,97 +91,97 @@ class TestAdminSettingsController(TestCo
 
                        .get_app_settings()['rhodecode_ga_code'], new_ga_code)
 

	
 
        response = response.follow()
 
        response.mustcontain(no=["_gaq.push(['_setAccount', '%s']);" % new_ga_code])
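`response.mustcontain()` is WebTest's assertion helper: positional strings must appear in the body, while the `no=` keyword asserts the given strings are absent (used above to check that the Google Analytics snippet disappears once the GA code is cleared). A stand-alone sketch against a stub WSGI app:

from webtest import TestApp

def stub_app(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/html; charset=utf-8')])
    return [b'<h1><a href="/">RhodeCode</a></h1>']

resp = TestApp(stub_app).get('/')
resp.mustcontain('<h1><a href="/">RhodeCode</a></h1>')   # must be present
resp.mustcontain(no=["_gaq.push(['_setAccount'"])        # must be absent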
 

	
 
    def test_title_change(self):
 
        self.log_user()
 
        old_title = 'RhodeCode'
 
        new_title = old_title + '_changed'
 
        old_realm = 'RhodeCode authentication'
 

	
 
        for new_title in ['Changed', 'Żółwik', old_title]:
 
            response = self.app.post(url('admin_setting', setting_id='global'),
 
                                         params=dict(
 
                                                     _method='put',
 
                                                     rhodecode_title=new_title,
 
                                                     rhodecode_realm=old_realm,
 
                                                     rhodecode_ga_code=''
 
                                                     ))
 

	
 
            self.checkSessionFlash(response, 'Updated application settings')
 
            self.assertEqual(RhodeCodeSetting
 
                             .get_app_settings()['rhodecode_title'],
 
                             new_title.decode('utf-8'))
 

	
 
            response = response.follow()
 
            response.mustcontain("""<h1><a href="/">%s</a></h1>""" % new_title)
 

	
 
    def test_my_account(self):
 
        self.log_user()
 
        response = self.app.get(url('admin_settings_my_account'))
 

	
 
        response.mustcontain('value="test_admin')
 

	
 
    @parameterized.expand([('firstname', 'new_username'),
 
                           ('lastname', 'new_username'),
 
                           ('admin', True),
 
                           ('admin', False),
 
                           ('ldap_dn', 'test'),
 
                           ('ldap_dn', None),
 
                           ('active', False),
 
                           ('active', True),
 
                           ('email', 'some@email.com'),
 
                           ])
 
    def test_my_account_update(self, name, expected):
 
        uname = 'testme'
 
        usr = UserModel().create_or_update(username=uname, password='qweqwe',
 
                                           email='testme@rhodecod.org')
 
        self.Session().commit()
 
        Session().commit()
 
        params = usr.get_api_data()
 
        user_id = usr.user_id
 
        self.log_user(username=uname, password='qweqwe')
 
        params.update({name: expected})
 
        params.update({'password_confirmation': ''})
 
        params.update({'new_password': ''})
 

	
 
        try:
 
            response = self.app.put(url('admin_settings_my_account_update',
 
                                        id=user_id), params)
 

	
 
            self.checkSessionFlash(response,
 
                                   'Your account was updated successfully')
 

	
 
            updated_user = User.get_by_username(uname)
 
            updated_params = updated_user.get_api_data()
 
            updated_params.update({'password_confirmation': ''})
 
            updated_params.update({'new_password': ''})
 

	
 
            params['last_login'] = updated_params['last_login']
 
            if name == 'email':
 
                params['emails'] = [expected]
 
            if name == 'ldap_dn':
 
                #cannot update this via form
 
                params['ldap_dn'] = None
 
            if name == 'active':
 
                #my account page cannot deactivate the account
 
                params['active'] = True
 
            if name == 'admin':
 
                #my account page cannot make you an admin
 
                params['admin'] = False
 

	
 
            self.assertEqual(params, updated_params)
 

	
 
        finally:
 
            UserModel().delete('testme')
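The `@parameterized.expand` decorator used above turns each tuple into its own generated test method, so every field update runs as a separate case. Below is a minimal stand-alone sketch; the `parameterized` import path is an assumption (the decorator in these tests comes in via rhodecode.tests), used here only so the example runs on its own.

# Stand-alone sketch; the import path is an illustrative assumption.
import unittest
from parameterized import parameterized

class TestFieldUpdate(unittest.TestCase):

    @parameterized.expand([
        ('firstname', 'new_username'),
        ('active', False),
        ('email', 'some@email.com'),
    ])
    def test_update(self, name, expected):
        # each tuple becomes a separately named, separately reported test
        params = {name: expected}
        self.assertEqual(params[name], expected)

if __name__ == '__main__':
    unittest.main()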
 

	
 
    def test_my_account_update_err_email_exists(self):
 
        self.log_user()
 

	
 
        new_email = 'test_regular@mail.com'  # already existing email
 
        response = self.app.put(url('admin_settings_my_account_update'),
 
                                params=dict(
 
                                    username='test_admin',
 
                                    new_password='test12',
 
                                    password_confirmation='test122',
 
                                    firstname='NewName',
 
                                    lastname='NewLastname',
rhodecode/tests/functional/test_admin_users.py
 
from sqlalchemy.orm.exc import NoResultFound
 

	
 
from rhodecode.tests import *
 
from rhodecode.model.db import User, Permission
 
from rhodecode.lib.auth import check_password
 
from rhodecode.model.user import UserModel
 
from rhodecode.model import validators
 
from rhodecode.lib import helpers as h
 
from rhodecode.model.meta import Session
 

	
 

	
 
class TestAdminUsersController(TestController):
 

	
 
    def test_index(self):
 
        self.log_user()
 
        response = self.app.get(url('users'))
 
        # Test response...
 

	
 
    def test_index_as_xml(self):
 
        response = self.app.get(url('formatted_users', format='xml'))
 

	
 
    def test_create(self):
 
        self.log_user()
 
        username = 'newtestuser'
 
        password = 'test12'
 
        password_confirmation = password
 
        name = 'name'
 
        lastname = 'lastname'
 
        email = 'mail@mail.com'
 

	
 
        response = self.app.post(url('users'),
 
                             {'username': username,
 
                               'password': password,
 
                               'password_confirmation': password_confirmation,
 
                               'firstname': name,
 
                               'active': True,
 
                               'lastname': lastname,
 
                               'email': email})
 

	
 
        self.checkSessionFlash(response, '''Created user %s''' % (username))
 

	
 
        new_user = self.Session.query(User).\
 
        new_user = Session().query(User).\
 
            filter(User.username == username).one()
 

	
 
        self.assertEqual(new_user.username, username)
 
        self.assertEqual(check_password(password, new_user.password), True)
 
        self.assertEqual(new_user.name, name)
 
        self.assertEqual(new_user.lastname, lastname)
 
        self.assertEqual(new_user.email, email)
 

	
 
        response.follow()
 
        response = response.follow()
 
        response.mustcontain("""newtestuser""")
 

	
 
    def test_create_err(self):
 
        self.log_user()
 
        username = 'new_user'
 
        password = ''
 
        name = 'name'
 
        lastname = 'lastname'
 
        email = 'errmail.com'
 

	
 
        response = self.app.post(url('users'), {'username': username,
 
                                               'password': password,
 
                                               'name': name,
 
                                               'active': False,
 
                                               'lastname': lastname,
 
                                               'email': email})
 

	
 
        msg = validators.ValidUsername(False, {})._messages['system_invalid_username']
 
        msg = h.html_escape(msg % {'username': 'new_user'})
 
        response.mustcontain("""<span class="error-message">%s</span>""" % msg)
 
        response.mustcontain("""<span class="error-message">Please enter a value</span>""")
 
        response.mustcontain("""<span class="error-message">An email address must contain a single @</span>""")
 

	
 
        def get_user():
 
            self.Session.query(User).filter(User.username == username).one()
 
            Session().query(User).filter(User.username == username).one()
 

	
 
        self.assertRaises(NoResultFound, get_user)  # no such user should exist in the database
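`assertRaises` is given the expected exception class and a zero-argument callable, so the query runs inside the assertion and the test passes only if `NoResultFound` escapes it. A small self-contained sketch of the same check, with a stand-in lookup instead of the real `User` query:

# Stand-alone sketch; `find_user` is a stand-in for the real
# Session().query(User)...one() lookup.
import unittest
from sqlalchemy.orm.exc import NoResultFound

def find_user(username, known_users=()):
    matches = [u for u in known_users if u == username]
    if len(matches) != 1:
        raise NoResultFound()   # mirrors Query.one() on zero rows
    return matches[0]

class TestUserNotCreated(unittest.TestCase):

    def test_invalid_user_absent(self):
        self.assertRaises(NoResultFound,
                          lambda: find_user('new_user', known_users=()))

if __name__ == '__main__':
    unittest.main()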
 

	
 
    def test_new(self):
 
        self.log_user()
 
        response = self.app.get(url('new_user'))
 

	
 
    def test_new_as_xml(self):
 
        response = self.app.get(url('formatted_new_user', format='xml'))
 

	
 
    @parameterized.expand([('firstname', 'new_username'),
 
                           ('lastname', 'new_username'),
 
                           ('admin', True),
 
                           ('admin', False),
 
                           ('ldap_dn', 'test'),
 
                           ('ldap_dn', None),
 
                           ('active', False),
 
                           ('active', True),
 
                           ('email', 'some@email.com'),
 
                           ])
 
    def test_update(self, name, expected):
 
        self.log_user()
 
        uname = 'testme'
 
        usr = UserModel().create_or_update(username=uname, password='qweqwe',
 
                                           email='testme@rhodecod.org')
 
        self.Session().commit()
 
        Session().commit()
 
        params = usr.get_api_data()
 
        params.update({name: expected})
 
        params.update({'password_confirmation': ''})
 
        params.update({'new_password': ''})
 
        if name == 'email':
 
            params['emails'] = [expected]
 
        if name == 'ldap_dn':
 
            #cannot update this via form
 
            params['ldap_dn'] = None
 
        try:
 
            response = self.app.put(url('user', id=usr.user_id), params)
 

	
 
            self.checkSessionFlash(response, '''User updated successfully''')
 

	
 
            updated_user = User.get_by_username(uname)
 
            updated_params = updated_user.get_api_data()
 
            updated_params.update({'password_confirmation': ''})
 
            updated_params.update({'new_password': ''})
 

	
 
            self.assertEqual(params, updated_params)
 

	
 
        finally:
 
            UserModel().delete('testme')
 

	
 
    def test_update_browser_fakeout(self):
 
        response = self.app.post(url('user', id=1), params=dict(_method='put'))
 

	
 
    def test_delete(self):
 
        self.log_user()
 
        username = 'newtestuserdeleteme'
 
        password = 'test12'
 
        name = 'name'
 
        lastname = 'lastname'
 
        email = 'todeletemail@mail.com'
 

	
 
        response = self.app.post(url('users'), {'username': username,
 
                                               'password': password,
 
                                               'password_confirmation': password,
 
                                               'firstname': name,
 
                                               'active': True,
 
                                               'lastname': lastname,
 
                                               'email': email})
 

	
 
        response = response.follow()
 

	
 
        new_user = self.Session.query(User)\
 
        new_user = Session().query(User)\
 
            .filter(User.username == username).one()
 
        response = self.app.delete(url('user', id=new_user.user_id))
 

	
 
        self.checkSessionFlash(response, 'Successfully deleted user')
 

	
 
    def test_delete_browser_fakeout(self):
 
        response = self.app.post(url('user', id=1),
 
                                 params=dict(_method='delete'))
 

	
 
    def test_show(self):
 
        response = self.app.get(url('user', id=1))
 

	
 
    def test_show_as_xml(self):
 
        response = self.app.get(url('formatted_user', id=1, format='xml'))
 

	
 
    def test_edit(self):
 
        self.log_user()
 
        user = User.get_by_username(TEST_USER_ADMIN_LOGIN)
 
        response = self.app.get(url('edit_user', id=user.user_id))
 

	
 
    def test_add_perm_create_repo(self):
 
        self.log_user()
 
        perm_none = Permission.get_by_key('hg.create.none')
 
        perm_create = Permission.get_by_key('hg.create.repository')
 

	
 
        user = UserModel().create_or_update(username='dummy', password='qwe',
 
                                            email='dummy', firstname='a',
 
                                            lastname='b')
 
        Session().commit()
 
        uid = user.user_id
 

	
 
        try:
 
            #initially neither hg.create permission is bound to the user directly
 
            self.assertEqual(UserModel().has_perm(user, perm_none), False)
 
            self.assertEqual(UserModel().has_perm(user, perm_create), False)
 

	
 
            response = self.app.post(url('user_perm', id=uid),
 
                                     params=dict(_method='put',
 
                                                 create_repo_perm=True))
 

	
 
            perm_none = Permission.get_by_key('hg.create.none')
 
            perm_create = Permission.get_by_key('hg.create.repository')
 

	
 
            #after the update the user should have hg.create.repository, not hg.create.none
 
            self.assertEqual(UserModel().has_perm(uid, perm_none), False)
 
            self.assertEqual(UserModel().has_perm(uid, perm_create), True)
 
        finally:
 
            UserModel().delete(uid)
rhodecode/tests/functional/test_admin_users_groups.py
 
from rhodecode.tests import *
 
from rhodecode.model.db import UserGroup, UserGroupToPerm, Permission
 
from rhodecode.model.meta import Session
 

	
 
TEST_USER_GROUP = 'admins_test'
 

	
 

	
 
class TestAdminUsersGroupsController(TestController):
 

	
 
    def test_index(self):
 
        response = self.app.get(url('users_groups'))
 
        # Test response...
 

	
 
    def test_index_as_xml(self):
 
        response = self.app.get(url('formatted_users_groups', format='xml'))
 

	
 
    def test_create(self):
 
        self.log_user()
 
        users_group_name = TEST_USER_GROUP
 
        response = self.app.post(url('users_groups'),
 
                                 {'users_group_name': users_group_name,
 
                                  'active':True})
 
        response.follow()
 

	
 
        self.checkSessionFlash(response,
 
                               'Created user group %s' % TEST_USER_GROUP)
 

	
 
    def test_new(self):
 
        response = self.app.get(url('new_users_group'))
 

	
 
    def test_new_as_xml(self):
 
        response = self.app.get(url('formatted_new_users_group', format='xml'))
 

	
 
    def test_update(self):
 
        response = self.app.put(url('users_group', id=1))
 

	
 
    def test_update_browser_fakeout(self):
 
        response = self.app.post(url('users_group', id=1),
 
                                 params=dict(_method='put'))
 

	
 
    def test_delete(self):
 
        self.log_user()
 
        users_group_name = TEST_USER_GROUP + 'another'
 
        response = self.app.post(url('users_groups'),
 
                                 {'users_group_name':users_group_name,
 
                                  'active':True})
 
        response.follow()
 

	
 
        self.checkSessionFlash(response,
 
                               'Created user group %s' % users_group_name)
 

	
 
        gr = self.Session.query(UserGroup)\
 
        gr = Session().query(UserGroup)\
 
                           .filter(UserGroup.users_group_name ==
 
                                   users_group_name).one()
 

	
 
        response = self.app.delete(url('users_group', id=gr.users_group_id))
 

	
 
        gr = self.Session.query(UserGroup)\
 
        gr = Session().query(UserGroup)\
 
                           .filter(UserGroup.users_group_name ==
 
                                   users_group_name).scalar()
 

	
 
        self.assertEqual(gr, None)
 

	
 
    def test_enable_repository_read_on_group(self):
 
        self.log_user()
 
        users_group_name = TEST_USER_GROUP + 'another2'
 
        response = self.app.post(url('users_groups'),
 
                                 {'users_group_name': users_group_name,
 
                                  'active': True})
 
        response.follow()
 

	
 
        ug = UserGroup.get_by_group_name(users_group_name)
 
        self.checkSessionFlash(response,
 
                               'Created user group %s' % users_group_name)
 
        ## ENABLE REPO CREATE ON A GROUP
 
        response = self.app.put(url('users_group_perm', id=ug.users_group_id),
 
                                 {'create_repo_perm': True})
 

	
 
        response.follow()
 
        ug = UserGroup.get_by_group_name(users_group_name)
 
        p = Permission.get_by_key('hg.create.repository')
 
        p2 = Permission.get_by_key('hg.usergroup.create.false')
 
        p3 = Permission.get_by_key('hg.fork.none')
 
        # check if the user group has these perms; they should be present since
 
        # defaults are on
 
        perms = UserGroupToPerm.query()\
 
            .filter(UserGroupToPerm.users_group == ug).all()
 

	
 
        self.assertEqual(
 
            sorted([[x.users_group_id, x.permission_id, ] for x in perms]),
 
            sorted([[ug.users_group_id, p.permission_id],
 
                    [ug.users_group_id, p2.permission_id],
 
                    [ug.users_group_id, p3.permission_id]])
 
        )
 

	
 
        ## DISABLE REPO CREATE ON A GROUP
 
        response = self.app.put(url('users_group_perm', id=ug.users_group_id),
 
                                    {})
 

	
 
        response.follow()
 
        ug = UserGroup.get_by_group_name(users_group_name)
 
        p = Permission.get_by_key('hg.create.none')
 
        p2 = Permission.get_by_key('hg.usergroup.create.false')
 
        p3 = Permission.get_by_key('hg.fork.none')
 

	
 
        # check if the user group has these perms; they should be present since
 
        # defaults are on
 
        perms = UserGroupToPerm.query()\
 
            .filter(UserGroupToPerm.users_group == ug).all()
 

	
 
        self.assertEqual(
 
            sorted([[x.users_group_id, x.permission_id, ] for x in perms]),
 
            sorted([[ug.users_group_id, p.permission_id],
 
                    [ug.users_group_id, p2.permission_id],
 
                    [ug.users_group_id, p3.permission_id]])
 
        )
 

	
 
        # DELETE !
 
        ug = UserGroup.get_by_group_name(users_group_name)
 
        ugid = ug.users_group_id
 
        response = self.app.delete(url('users_group', id=ug.users_group_id))
 
        response = response.follow()
 
        gr = self.Session.query(UserGroup)\
 
        gr = Session().query(UserGroup)\
 
                           .filter(UserGroup.users_group_name ==
 
                                   users_group_name).scalar()
 

	
 
        self.assertEqual(gr, None)
 
        p = Permission.get_by_key('hg.create.repository')
 
        perms = UserGroupToPerm.query()\
 
            .filter(UserGroupToPerm.users_group_id == ugid).all()
 
        perms = [[x.users_group_id,
 
                  x.permission_id, ] for x in perms]
 
        self.assertEqual(
 
            perms,
 
            []
 
        )
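The permission assertions in these group tests compare the granted (users_group_id, permission_id) pairs as sorted lists, which makes the check independent of the order in which UserGroupToPerm rows come back. A trivial stand-alone illustration (the numeric ids are invented for the example):

# Stand-alone illustration of the order-independent comparison.
granted = [[2, 11], [2, 5], [2, 7]]    # rows as reported by the query, any order
expected = [[2, 5], [2, 7], [2, 11]]   # create / usergroup.create / fork defaults
assert sorted(granted) == sorted(expected)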
 

	
 
    def test_enable_repository_fork_on_group(self):
 
        self.log_user()
 
        users_group_name = TEST_USER_GROUP + 'another2'
 
        response = self.app.post(url('users_groups'),
 
                                 {'users_group_name': users_group_name,
 
                                  'active': True})
 
        response.follow()
 

	
 
        ug = UserGroup.get_by_group_name(users_group_name)
 
        self.checkSessionFlash(response,
 
                               'Created user group %s' % users_group_name)
 
        ## ENABLE REPO FORK ON A GROUP
 
        response = self.app.put(url('users_group_perm', id=ug.users_group_id),
 
                                 {'fork_repo_perm': True})
 

	
 
        response.follow()
 
        ug = UserGroup.get_by_group_name(users_group_name)
 
        p = Permission.get_by_key('hg.create.none')
 
        p2 = Permission.get_by_key('hg.usergroup.create.false')
 
        p3 = Permission.get_by_key('hg.fork.repository')
 
        # check if the user group has these perms; they should be present since
 
        # defaults are on
 
        perms = UserGroupToPerm.query()\
 
            .filter(UserGroupToPerm.users_group == ug).all()
 

	
 
        self.assertEqual(
 
            sorted([[x.users_group_id, x.permission_id, ] for x in perms]),
 
            sorted([[ug.users_group_id, p.permission_id],
 
                    [ug.users_group_id, p2.permission_id],
 
                    [ug.users_group_id, p3.permission_id]])
 
        )
 

	
 
        ## DISABLE REPO FORK ON A GROUP
 
        response = self.app.put(url('users_group_perm', id=ug.users_group_id),
 
                                    {})
 

	
 
        response.follow()
 
        ug = UserGroup.get_by_group_name(users_group_name)
 
        p = Permission.get_by_key('hg.create.none')
 
        p2 = Permission.get_by_key('hg.usergroup.create.false')
 
        p3 = Permission.get_by_key('hg.fork.none')
 
        # check if the user group has these perms; they should be present since
 
        # defaults are on
 
        perms = UserGroupToPerm.query()\
 
            .filter(UserGroupToPerm.users_group == ug).all()
 

	
 
        self.assertEqual(
 
            sorted([[x.users_group_id, x.permission_id, ] for x in perms]),
 
            sorted([[ug.users_group_id, p.permission_id],
 
                    [ug.users_group_id, p2.permission_id],
 
                    [ug.users_group_id, p3.permission_id]])
 
        )
 

	
 
        # DELETE !
 
        ug = UserGroup.get_by_group_name(users_group_name)
 
        ugid = ug.users_group_id
 
        response = self.app.delete(url('users_group', id=ug.users_group_id))
 
        response = response.follow()
 
        gr = self.Session.query(UserGroup)\
 
        gr = Session().query(UserGroup)\
 
                           .filter(UserGroup.users_group_name ==
 
                                   users_group_name).scalar()
 

	
 
        self.assertEqual(gr, None)
 
        p = Permission.get_by_key('hg.fork.repository')
 
        perms = UserGroupToPerm.query()\
 
            .filter(UserGroupToPerm.users_group_id == ugid).all()
 
        perms = [[x.users_group_id,
 
                  x.permission_id, ] for x in perms]
 
        self.assertEqual(
 
            perms,
 
            []
 
        )
 

	
 
    def test_delete_browser_fakeout(self):
 
        response = self.app.post(url('users_group', id=1),
 
                                 params=dict(_method='delete'))
 

	
 
    def test_show(self):
 
        response = self.app.get(url('users_group', id=1))
 

	
 
    def test_show_as_xml(self):
 
        response = self.app.get(url('formatted_users_group', id=1, format='xml'))
 

	
 
    def test_edit(self):
 
        response = self.app.get(url('edit_users_group', id=1))
 

	
 
    def test_edit_as_xml(self):
 
        response = self.app.get(url('formatted_edit_users_group', id=1, format='xml'))
 

	
 
    def test_assign_members(self):
 
        pass
 

	
 
    def test_add_create_permission(self):
 
        pass
 

	
 
    def test_revoke_members(self):
 
        pass
rhodecode/tests/functional/test_branches.py
 
from rhodecode.tests import *
 

	
 

	
 
class TestBranchesController(TestController):
 

	
 
    def test_index(self):
 
    def test_index_hg(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='branches',
 
                                    action='index', repo_name=HG_REPO))
 
        response.mustcontain("""<a href="/%s/files/27cd5cce30c96924232dffcd24178a07ffeb5dfc/">default</a>""" % HG_REPO)
 
        response.mustcontain("""<a href="/%s/files/97e8b885c04894463c51898e14387d80c30ed1ee/">git</a>""" % HG_REPO)
 
        response.mustcontain("""<a href="/%s/files/2e6a2bf9356ca56df08807f4ad86d480da72a8f4/">web</a>""" % HG_REPO)
 
        response.mustcontain("""<a href="/%s/files/96507bd11ecc815ebc6270fdf6db110928c09c1e/">default</a>""" % HG_REPO)
 

	
 
        # closed branches
 
        response.mustcontain("""<a href="/%s/changeset/95ca6417ec0de6ac3bd19b336d7b608f27b88711">git [closed]</a><""" % HG_REPO)
 
        response.mustcontain("""<a href="/%s/changeset/0dd5fd7b37a4eea4dd9b662af63cee743b4ccce2">web [closed]</a>""" % HG_REPO)
 

	
 
    def test_index_git(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='branches',
 
                                    action='index', repo_name=GIT_REPO))
 
        response.mustcontain("""<a href="/%s/files/5f2c6ee195929b0be80749243c18121c9864a3b3/">master</a>""" % GIT_REPO)
rhodecode/tests/functional/test_changelog.py
 
from rhodecode.tests import *
 

	
 

	
 
class TestChangelogController(TestController):
 

	
 
    def test_index_hg(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='changelog', action='index',
 
                                    repo_name=HG_REPO))
 

	
 
        response.mustcontain('''id="chg_20" class="container tablerow1"''')
 
        response.mustcontain(
 
            """<input class="changeset_range" """
 
            """id="5e204e7583b9c8e7b93a020bd036564b1e731dae" """
 
            """name="5e204e7583b9c8e7b93a020bd036564b1e731dae" """
 
            """id="7b22a518347bb9bc19679f6af07cd0a61bfe16e7" """
 
            """name="7b22a518347bb9bc19679f6af07cd0a61bfe16e7" """
 
            """type="checkbox" value="1" />"""
 
        )
 

	
 
        #rev 640: code garden
 
        response.mustcontain(
 
            """<span class="changeset_hash">r154:5e204e7583b9</span>"""
 
            """<span class="changeset_hash">r640:0a4e54a44604</span>"""
 
        )
 

	
 
        response.mustcontain("""Small update at simplevcs app""")
 

	
 
#        response.mustcontain(
 
#            """<div id="changed_total_5e204e7583b9c8e7b93a020bd036564b1e731dae" """
 
#            """style="float:right;" class="changed_total tooltip" """
 
#            """title="Affected number of files, click to show """
 
#            """more details">3</div>"""
 
#        )
 
        response.mustcontain("""code garden""")
 

	
 
    def test_index_pagination_hg(self):
 
        self.log_user()
 
        #pagination
 
        self.app.get(url(controller='changelog', action='index',
 
                                    repo_name=HG_REPO), {'page': 1})
 
        self.app.get(url(controller='changelog', action='index',
 
                                    repo_name=HG_REPO), {'page': 2})
 
        self.app.get(url(controller='changelog', action='index',
 
                                    repo_name=HG_REPO), {'page': 3})
 
        self.app.get(url(controller='changelog', action='index',
 
                                    repo_name=HG_REPO), {'page': 4})
 
        self.app.get(url(controller='changelog', action='index',
 
                                    repo_name=HG_REPO), {'page': 5})
 
        response = self.app.get(url(controller='changelog', action='index',
 
                                    repo_name=HG_REPO), {'page': 6})
 

	
 
        # Test response after pagination...
 
        response.mustcontain(
 
            """<input class="changeset_range" """
 
            """id="46ad32a4f974e45472a898c6b0acb600320579b1" """
 
            """name="46ad32a4f974e45472a898c6b0acb600320579b1" """
 
            """id="22baf968d547386b9516965ce89d189665003a31" """
 
            """name="22baf968d547386b9516965ce89d189665003a31" """
 
            """type="checkbox" value="1" />"""
 
        )
 

	
 
        response.mustcontain(
 
            """<span class="changeset_hash">r64:46ad32a4f974</span>"""
 
            """<span class="changeset_hash">r539:22baf968d547</span>"""
 
        )
 

	
 
#        response.mustcontain(
 
#            """<div id="changed_total_46ad32a4f974e45472a898c6b0acb600320579b1" """
 
#            """style="float:right;" class="changed_total tooltip" """
 
#            """title="Affected number of files, click to show """
 
#            """more details">21</div>"""
 
#        )
 
#
 
#        response.mustcontain(
 
#            """<a href="/%s/changeset/"""
 
#            """46ad32a4f974e45472a898c6b0acb600320579b1" """
 
#            """title="Merge with 2e6a2bf9356ca56df08807f4ad86d480da72a8f4">"""
 
#            """46ad32a4f974</a>""" % HG_REPO
 
#        )
 

	
 
    def test_index_git(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='changelog', action='index',
 
                                    repo_name=GIT_REPO))
 

	
 
        response.mustcontain('''id="chg_20" class="container tablerow1"''')
 
        response.mustcontain(
 
            """<input class="changeset_range" """
 
            """id="95f9a91d775b0084b2368ae7779e44931c849c0e" """
 
            """name="95f9a91d775b0084b2368ae7779e44931c849c0e" """
 
            """type="checkbox" value="1" />"""
 
        )
 

	
 
        response.mustcontain(
 
            """<span class="changeset_hash">r613:95f9a91d775b</span>"""
 
        )
 

	
 
        response.mustcontain("""fixing stupid typo in context for mercurial""")
 

	
 
#        response.mustcontain(
 
#            """<div id="changed_total_5e204e7583b9c8e7b93a020bd036564b1e731dae" """
 
#            """style="float:right;" class="changed_total tooltip" """
 
#            """title="Affected number of files, click to show """
 
#            """more details">3</div>"""
 
#        )
 

	
 
    def test_index_pagination_git(self):
 
        self.log_user()
 
        #pagination
 
        self.app.get(url(controller='changelog', action='index',
 
                                    repo_name=GIT_REPO), {'page': 1})
 
        self.app.get(url(controller='changelog', action='index',
 
                                    repo_name=GIT_REPO), {'page': 2})
 
        self.app.get(url(controller='changelog', action='index',
 
                                    repo_name=GIT_REPO), {'page': 3})
 
        self.app.get(url(controller='changelog', action='index',
 
                                    repo_name=GIT_REPO), {'page': 4})
 
        self.app.get(url(controller='changelog', action='index',
 
                                    repo_name=GIT_REPO), {'page': 5})
 
        response = self.app.get(url(controller='changelog', action='index',
 
                                    repo_name=GIT_REPO), {'page': 6})
 

	
 
        # Test response after pagination...
 
        response.mustcontain(
 
            """<input class="changeset_range" """
 
            """id="636ed213f2f11ef91071b9c24f2d5e6bd01a6ed5" """
 
            """name="636ed213f2f11ef91071b9c24f2d5e6bd01a6ed5" """
 
            """type="checkbox" value="1" />"""
rhodecode/tests/functional/test_changeset_comments.py
 
from rhodecode.tests import *
 
from rhodecode.model.db import ChangesetComment, Notification, User, \
 
    UserNotification
 
from rhodecode.model.meta import Session
 

	
 

	
 
class TestChangeSetCommentsController(TestController):
 

	
 
    def setUp(self):
 
        for x in ChangesetComment.query().all():
 
            self.Session.delete(x)
 
        self.Session.commit()
 
            Session().delete(x)
 
        Session().commit()
 

	
 
        for x in Notification.query().all():
 
            self.Session.delete(x)
 
        self.Session.commit()
 
            Session().delete(x)
 
        Session().commit()
 

	
 
    def tearDown(self):
 
        for x in ChangesetComment.query().all():
 
            self.Session.delete(x)
 
        self.Session.commit()
 
            Session().delete(x)
 
        Session().commit()
 

	
 
        for x in Notification.query().all():
 
            self.Session.delete(x)
 
        self.Session.commit()
 
            Session().delete(x)
 
        Session().commit()
 

	
 
    def test_create(self):
 
        self.log_user()
 
        rev = '27cd5cce30c96924232dffcd24178a07ffeb5dfc'
 
        text = u'CommentOnRevision'
 

	
 
        params = {'text': text}
 
        response = self.app.post(url(controller='changeset', action='comment',
 
                                     repo_name=HG_REPO, revision=rev),
 
                                     params=params)
 
        # Test response...
 
        self.assertEqual(response.status, '302 Found')
 
        response.follow()
 

	
 
        response = self.app.get(url(controller='changeset', action='index',
 
                                repo_name=HG_REPO, revision=rev))
 
        # test DB
 
        self.assertEqual(ChangesetComment.query().count(), 1)
 
        response.mustcontain('''<div class="comments-number">%s comment '''
 
                             '''(0 inline)</div>''' % 1)
 

	
 
        self.assertEqual(Notification.query().count(), 1)
 
        self.assertEqual(ChangesetComment.query().count(), 1)
 

	
 
        notification = Notification.query().all()[0]
 

	
 
        ID = ChangesetComment.query().first().comment_id
 
        self.assertEqual(notification.type_,
 
                         Notification.TYPE_CHANGESET_COMMENT)
 
        sbj = (u'/vcs_test_hg/changeset/'
 
               '27cd5cce30c96924232dffcd24178a07ffeb5dfc#comment-%s' % ID)
 
        print "%s vs %s" % (sbj, notification.subject)
 
        self.assertTrue(sbj in notification.subject)
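The comment-posting flow above follows the usual WebTest pattern: the POST answers with a redirect, `response.follow()` issues the GET on the Location target, and the assertions run against the followed page. A stand-alone sketch with a stub WSGI app standing in for the changeset controller:

from webtest import TestApp

def stub_app(environ, start_response):
    if environ['REQUEST_METHOD'] == 'POST' and environ['PATH_INFO'] == '/comment':
        start_response('302 Found', [('Location', '/changeset/abc')])
        return [b'']
    start_response('200 OK', [('Content-Type', 'text/html; charset=utf-8')])
    return [b'<div class="comments-number">1 comment (0 inline)</div>']

app = TestApp(stub_app)
response = app.post('/comment', params={'text': 'CommentOnRevision'})
assert response.status == '302 Found'
response = response.follow()                 # GET on the Location header
response.mustcontain('1 comment (0 inline)')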
 

	
 
    def test_create_inline(self):
 
        self.log_user()
 
        rev = '27cd5cce30c96924232dffcd24178a07ffeb5dfc'
 
        text = u'CommentOnRevision'
 
        f_path = 'vcs/web/simplevcs/views/repository.py'
 
        line = 'n1'
 

	
 
        params = {'text': text, 'f_path': f_path, 'line': line}
 
        response = self.app.post(url(controller='changeset', action='comment',
 
                                     repo_name=HG_REPO, revision=rev),
 
                                     params=params)
 
        # Test response...
 
        self.assertEqual(response.status, '302 Found')
 
        response.follow()
rhodecode/tests/functional/test_compare_local.py
 
from rhodecode.tests import *
 
from rhodecode.model.repo import RepoModel
 
from rhodecode.model.meta import Session
 
from rhodecode.model.db import Repository
 
from rhodecode.model.scm import ScmModel
 
from rhodecode.lib.vcs.backends.base import EmptyChangeset
 

	
 

	
 
class TestCompareController(TestController):
 

	
 
    def test_compare_tag_hg(self):
 
        self.log_user()
 
        tag1 = '0.1.2'
 
        tag2 = '0.1.3'
 
        tag1 = 'v0.1.2'
 
        tag2 = 'v0.1.3'
 
        response = self.app.get(url(controller='compare', action='index',
 
                                    repo_name=HG_REPO,
 
                                    org_ref_type="tag",
 
                                    org_ref=tag1,
 
                                    other_ref_type="tag",
 
                                    other_ref=tag2,
 
                                    ))
 
                                    ), status=200)
 
        response.mustcontain('%s@%s -&gt; %s@%s' % (HG_REPO, tag1, HG_REPO, tag2))
 

	
 
        ## outgoing changesets between tags
 
        response.mustcontain('''<a href="/%s/changeset/c5ddebc06eaaba3010c2d66ea6ec9d074eb0f678">r112:c5ddebc06eaa</a>''' % HG_REPO)
 
        response.mustcontain('''<a href="/%s/changeset/70d4cef8a37657ee4cf5aabb3bd9f68879769816">r115:70d4cef8a376</a>''' % HG_REPO)
 
        response.mustcontain('''<a href="/%s/changeset/9749bfbfc0d2eba208d7947de266303b67c87cda">r116:9749bfbfc0d2</a>''' % HG_REPO)
 
        response.mustcontain('''<a href="/%s/changeset/41fda979f02fda216374bf8edac4e83f69e7581c">r117:41fda979f02f</a>''' % HG_REPO)
 
        response.mustcontain('''<a href="/%s/changeset/bb1a3ab98cc45cb934a77dcabf87a5a598b59e97">r118:bb1a3ab98cc4</a>''' % HG_REPO)
 
        response.mustcontain('''<a href="/%s/changeset/36e0fc9d2808c5022a24f49d6658330383ed8666">r119:36e0fc9d2808</a>''' % HG_REPO)
 
        response.mustcontain('''<a href="/%s/changeset/17544fbfcd33ffb439e2b728b5d526b1ef30bfcf">r120:17544fbfcd33</a>''' % HG_REPO)
 

	
 
        response.mustcontain('11 files changed with 94 insertions and 64 deletions')
 

	
 
        ## files diff
 
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--1c5cf9e91c12">docs/api/utils/index.rst</a></div>''' % (HG_REPO, tag1, tag2))
 
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--e3305437df55">test_and_report.sh</a></div>''' % (HG_REPO, tag1, tag2))
 
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--c8e92ef85cd1">.hgignore</a></div>''' % (HG_REPO, tag1, tag2))
 
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--6e08b694d687">.hgtags</a></div>''' % (HG_REPO, tag1, tag2))
 
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--2c14b00f3393">docs/api/index.rst</a></div>''' % (HG_REPO, tag1, tag2))
 
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--430ccbc82bdf">vcs/__init__.py</a></div>''' % (HG_REPO, tag1, tag2))
 
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--9c390eb52cd6">vcs/backends/hg.py</a></div>''' % (HG_REPO, tag1, tag2))
 
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--ebb592c595c0">vcs/utils/__init__.py</a></div>''' % (HG_REPO, tag1, tag2))
 
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--7abc741b5052">vcs/utils/annotate.py</a></div>''' % (HG_REPO, tag1, tag2))
 
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--2ef0ef106c56">vcs/utils/diffs.py</a></div>''' % (HG_REPO, tag1, tag2))
 
        response.mustcontain('''<div class="node"><a href="/%s/compare/tag@%s...tag@%s#C--3150cb87d4b7">vcs/utils/lazy.py</a></div>''' % (HG_REPO, tag1, tag2))
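The assertions in this compare test spell out the URL shape the compare controller produces: /<repo>/compare/<org_ref_type>@<org_ref>...<other_ref_type>@<other_ref>, with per-file anchors such as #C--1c5cf9e91c12. The helper below is purely illustrative (the name `compare_url` is made up, and HG_REPO is assumed to be 'vcs_test_hg', as the changeset-comment test's expected subject suggests); it only reconstructs that shape from the parameters used above.

# Illustrative helper only; reconstructs the URL pattern asserted above.
def compare_url(repo, org_ref_type, org_ref, other_ref_type, other_ref,
                anchor=None):
    path = '/%s/compare/%s@%s...%s@%s' % (
        repo, org_ref_type, org_ref, other_ref_type, other_ref)
    if anchor:
        path += '#%s' % anchor
    return path

assert compare_url('vcs_test_hg', 'tag', 'v0.1.2', 'tag', 'v0.1.3',
                   anchor='C--1c5cf9e91c12') == \
    '/vcs_test_hg/compare/tag@v0.1.2...tag@v0.1.3#C--1c5cf9e91c12'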
 

	
 
    def test_compare_tag_git(self):
 
        self.log_user()
 
        tag1 = 'v0.1.2'
 
        tag2 = 'v0.1.3'
 
        response = self.app.get(url(controller='compare', action='index',
 
                                    repo_name=GIT_REPO,
 
                                    org_ref_type="tag",
 
                                    org_ref=tag1,
 
                                    other_ref_type="tag",
 
                                    other_ref=tag2,
 
                                    ))
 
                                    ), status=200)
 
        response.mustcontain('%s@%s -&gt; %s@%s' % (GIT_REPO, tag1, GIT_REPO, tag2))
 

	
 
        ## outgoing changesets between tags
 
        response.mustcontain('''<a href="/%s/changeset/794bbdd31545c199f74912709ea350dedcd189a2">r113:794bbdd31545</a>''' % GIT_REPO)
 
        response.mustcontain('''<a href="/%s/changeset/e36d8c5025329bdd4212bd53d4ed8a70ff44985f">r115:e36d8c502532</a>''' % GIT_REPO)
 
        response.mustcontain('''<a href="/%s/changeset/5c9ff4f6d7508db0e72b1d2991c357d0d8e07af2">r116:5c9ff4f6d750</a>''' % GIT_REPO)
 
        response.mustcontain('''<a href="/%s/changeset/b7187fa2b8c1d773ec35e9dee12f01f74808c879">r117:b7187fa2b8c1</a>''' % GIT_REPO)
 
        response.mustcontain('''<a href="/%s/changeset/5f3b74262014a8de2dc7dade1152de9fd0c8efef">r118:5f3b74262014</a>''' % GIT_REPO)
 
        response.mustcontain('''<a href="/%s/changeset/17438a11f72b93f56d0e08e7d1fa79a378578a82">r119:17438a11f72b</a>''' % GIT_REPO)
 
        response.mustcontain('''<a href="/%s/changeset/5a3a8fb005554692b16e21dee62bf02667d8dc3e">r120:5a3a8fb00555</a>''' % GIT_REPO)
 

	
 
        response.mustcontain('11 files changed with 94 insertions and 64 deletions')
 

	
 
        #files
 
        response.mustcontain('''<a href="/%s/compare/tag@%s...tag@%s#C--1c5cf9e91c12">docs/api/utils/index.rst</a>''' % (GIT_REPO, tag1, tag2))
 
        response.mustcontain('''<a href="/%s/compare/tag@%s...tag@%s#C--e3305437df55">test_and_report.sh</a>''' % (GIT_REPO, tag1, tag2))
 
        response.mustcontain('''<a href="/%s/compare/tag@%s...tag@%s#C--c8e92ef85cd1">.hgignore</a>''' % (GIT_REPO, tag1, tag2))
 
        response.mustcontain('''<a href="/%s/compare/tag@%s...tag@%s#C--6e08b694d687">.hgtags</a>''' % (GIT_REPO, tag1, tag2))
 
        response.mustcontain('''<a href="/%s/compare/tag@%s...tag@%s#C--2c14b00f3393">docs/api/index.rst</a>''' % (GIT_REPO, tag1, tag2))
 
        response.mustcontain('''<a href="/%s/compare/tag@%s...tag@%s#C--430ccbc82bdf">vcs/__init__.py</a>''' % (GIT_REPO, tag1, tag2))
 
        response.mustcontain('''<a href="/%s/compare/tag@%s...tag@%s#C--9c390eb52cd6">vcs/backends/hg.py</a>''' % (GIT_REPO, tag1, tag2))
 
        response.mustcontain('''<a href="/%s/compare/tag@%s...tag@%s#C--ebb592c595c0">vcs/utils/__init__.py</a>''' % (GIT_REPO, tag1, tag2))
 
        response.mustcontain('''<a href="/%s/compare/tag@%s...tag@%s#C--7abc741b5052">vcs/utils/annotate.py</a>''' % (GIT_REPO, tag1, tag2))
 
        response.mustcontain('''<a href="/%s/compare/tag@%s...tag@%s#C--2ef0ef106c56">vcs/utils/diffs.py</a>''' % (GIT_REPO, tag1, tag2))
 
        response.mustcontain('''<a href="/%s/compare/tag@%s...tag@%s#C--3150cb87d4b7">vcs/utils/lazy.py</a>''' % (GIT_REPO, tag1, tag2))
 

	
 
    def test_index_branch_hg(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='compare', action='index',
 
                                    repo_name=HG_REPO,
 
                                    org_ref_type="branch",
 
                                    org_ref='default',
 
                                    other_ref_type="branch",
 
                                    other_ref='default',
 
                                    ))
 

	
 
        response.mustcontain('%s@default -&gt; %s@default' % (HG_REPO, HG_REPO))
 
        # branch are equal
 
        response.mustcontain('<span class="empty_data">No files</span>')
 
        response.mustcontain('<span class="empty_data">No changesets</span>')
 

	
 
    def test_index_branch_git(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='compare', action='index',
 
                                    repo_name=GIT_REPO,
 
                                    org_ref_type="branch",
 
                                    org_ref='master',
 
                                    other_ref_type="branch",

Changeset was too big and was cut off.
