Changeset ebc4cc82cce2 (merge, branch: default) [Not reviewed]
Mads Kiilerich <madski@unity3d.com>, 2015-11-07 13:24:44
Commit message: Merge stable
0 comments (0 inline, 0 general)
6 files changed with 7 insertions and 7 deletions:
kallithea/lib/auth_modules/auth_pam.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.auth_modules.auth_pam
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Kallithea authentication library for PAM
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Created on Apr 09, 2013
 
:author: Alexey Larikov
 
"""
 

	
 
import logging
 
import time
 
import pam
 
import pwd
 
import grp
 
import re
 
import socket
 
import threading
 

	
 
from kallithea.lib import auth_modules
 
from kallithea.lib.compat import formatted_json, hybrid_property
 

	
 
log = logging.getLogger(__name__)
 

	
 
# Cache to store PAM authenticated users
 
_auth_cache = dict()
 
_pam_lock = threading.Lock()
 

	
 

	
 
class KallitheaAuthPlugin(auth_modules.KallitheaExternalAuthPlugin):
 
    # PAM authentication can be slow. Repository operations involve a lot of
    # auth calls. A little caching helps speed up push/pull operations significantly.
 
    AUTH_CACHE_TTL = 4
 

	
 
    def __init__(self):
 
        global _auth_cache
 
        ts = time.time()
 
        cleared_cache = dict(
 
            [(k, v) for (k, v) in _auth_cache.items() if
 
             (v + KallitheaAuthPlugin.AUTH_CACHE_TTL > ts)])
 
        _auth_cache = cleared_cache
 

	
 
    @hybrid_property
 
    def name(self):
 
        return "pam"
 

	
 
    def settings(self):
 
        settings = [
 
            {
 
                "name": "service",
 
                "validator": self.validators.UnicodeString(strip=True),
 
                "type": "string",
 
                "description": "PAM service name to use for authentication",
 
                "default": "login",
 
                "formname": "PAM service name"
 
            },
 
            {
 
                "name": "gecos",
 
                "validator": self.validators.UnicodeString(strip=True),
 
                "type": "string",
 
                "description": "Regex for extracting user name/email etc "
 
                               "from Unix userinfo",
 
                "default": "(?P<last_name>.+),\s*(?P<first_name>\w+)",
 
                "formname": "Gecos Regex"
 
            }
 
        ]
 
        return settings
 

	
 
    def use_fake_password(self):
 
        return True
 

	
 
    def auth(self, userobj, username, password, settings, **kwargs):
 
        if username not in _auth_cache:
 
            # Need lock here, as PAM authentication is not thread safe
 
            _pam_lock.acquire()
 
            try:
 
                auth_result = pam.authenticate(username, password,
 
                                               settings["service"])
 
                # cache result only if we properly authenticated
 
                if auth_result:
 
                    _auth_cache[username] = time.time()
 
            finally:
 
                _pam_lock.release()
 

	
 
            if not auth_result:
 
                log.error("PAM was unable to authenticate user: %s", username)
 
                return None
 
        else:
 
            log.debug("Using cached auth for user: %s", username)
 

	
 
        # old attrs fetched from Kallithea database
 
        admin = getattr(userobj, 'admin', False)
 
        active = getattr(userobj, 'active', True)
 
        email = getattr(userobj, 'email', '') or "%s@%s" % (username, socket.gethostname())
 
        firstname = getattr(userobj, 'firstname', '')
 
        lastname = getattr(userobj, 'lastname', '')
 

	
 
        user_data = {
 
            'username': username,
 
            'firstname': firstname,
 
            'lastname': lastname,
 
            'groups': [g.gr_name for g in grp.getgrall() if username in g.gr_mem],
 
            'email': email,
 
            'admin': admin,
 
            'active': active,
 
            "active_from_extern": None,
 
            'extern_name': username,
 
        }
 

	
 
        try:
 
-            user_data = pwd.getpwnam(username)
+            user_pw_data = pwd.getpwnam(username)
            regex = settings["gecos"]
-            match = re.search(regex, user_data.pw_gecos)
+            match = re.search(regex, user_pw_data.pw_gecos)
 
            if match:
 
                user_data["firstname"] = match.group('first_name')
 
                user_data["lastname"] = match.group('last_name')
 
        except Exception:
 
            log.warning("Cannot extract additional info for PAM user %s", username)
 
            pass
 

	
 
        log.debug("pamuser: \n%s", formatted_json(user_data))
 
        log.info('user %s authenticated correctly', user_data['username'])
 
        return user_data
 

	
 
    def get_managed_fields(self):
 
        return ['username', 'password']
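
As an illustration only (not part of this changeset), here is a minimal sketch of how the plugin's default "gecos" setting shown in settings() above would split a passwd GECOS field into name parts; the sample value "Kiilerich, Mads" is made up, and the snippet uses Python 2 print statements like the surrounding code.

# Minimal sketch, not part of the changeset: exercising the plugin's default
# gecos regex on a made-up GECOS value.
import re

GECOS_DEFAULT = r"(?P<last_name>.+),\s*(?P<first_name>\w+)"
match = re.search(GECOS_DEFAULT, "Kiilerich, Mads")
if match:
    print match.group('first_name')  # Mads
    print match.group('last_name')   # Kiilerich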
kallithea/lib/paster_commands/update_repoinfo.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.paster_commands.update_repoinfo
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
update-repoinfo paster command for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jul 14, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 

	
 
import os
 
import sys
 
import logging
 
import string
 

	
 
from kallithea.lib.utils import BasePasterCommand
 
from kallithea.model.db import Repository
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.meta import Session
 

	
 
# Add location of top level folder to sys.path
 
from os.path import dirname as dn
 
rc_path = dn(dn(dn(os.path.realpath(__file__))))
 
sys.path.append(rc_path)
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class Command(BasePasterCommand):
 

	
 
    max_args = 1
 
    min_args = 1
 

	
 
    usage = "CONFIG_FILE"
 
    group_name = "Kallithea"
 
    takes_config_file = -1
 
    parser = BasePasterCommand.standard_parser(verbose=True)
 
    summary = "Updates repositories caches for last changeset"
 

	
 
    def command(self):
 
        #get SqlAlchemy session
 
        self._init_session()
 

	
 
        repo_update_list = map(string.strip,
 
                               self.options.repo_update_list.split(',')) \
 
                               if self.options.repo_update_list else None
 

	
 
-        if repo_update_list:
+        if repo_update_list is not None:
 
            repo_list = list(Repository.query()\
 
                .filter(Repository.repo_name.in_(repo_update_list)))
 
        else:
 
            repo_list = Repository.getAll()
 
        RepoModel.update_repoinfo(repositories=repo_list)
 
        Session().commit()
 

	
 
        if self.options.invalidate_cache:
 
            for r in repo_list:
 
                r.set_invalidate()
 
        print 'Updated cache for %s repositories' % (len(repo_list))
 

	
 
    def update_parser(self):
 
        self.parser.add_option('--update-only',
 
                           action='store',
 
                           dest='repo_update_list',
 
                           help="Specifies a comma separated list of repositories "
 
                                "to update last commit info for. OPTIONAL")
 
        self.parser.add_option('--invalidate-cache',
 
                           action='store_true',
 
                           dest='invalidate_cache',
 
                           help="Trigger cache invalidation event for repos. "
 
                                "OPTIONAL")
kallithea/lib/vcs/backends/git/changeset.py
 
import re
 
from itertools import chain
 
from dulwich import objects
 
from subprocess import Popen, PIPE
 

	
 
from kallithea.lib.vcs.conf import settings
 
from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
 
from kallithea.lib.vcs.exceptions import (
 
    RepositoryError, ChangesetError, NodeDoesNotExistError, VCSError,
 
    ChangesetDoesNotExistError, ImproperArchiveTypeError
 
)
 
from kallithea.lib.vcs.nodes import (
 
    FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
 
    ChangedFileNodesGenerator, AddedFileNodesGenerator, RemovedFileNodesGenerator
 
)
 
from kallithea.lib.vcs.utils import (
 
    safe_unicode, safe_str, safe_int, date_fromtimestamp
 
)
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 

	
 

	
 
class GitChangeset(BaseChangeset):
 
    """
 
    Represents state of the repository at a single revision.
 
    """
 

	
 
    def __init__(self, repository, revision):
 
        self._stat_modes = {}
 
        self.repository = repository
 
        revision = safe_str(revision)
 
        try:
 
            commit = self.repository._repo[revision]
 
            if isinstance(commit, objects.Tag):
 
                revision = safe_str(commit.object[1])
 
                commit = self.repository._repo.get_object(commit.object[1])
 
        except KeyError:
 
            raise RepositoryError("Cannot get object with id %s" % revision)
 
        self.raw_id = revision
 
        self.id = self.raw_id
 
        self.short_id = self.raw_id[:12]
 
        self._commit = commit
 
        self._tree_id = commit.tree
 
        self._committer_property = 'committer'
 
        self._author_property = 'author'
 
        self._date_property = 'commit_time'
 
        self._date_tz_property = 'commit_timezone'
 
        self.revision = repository.revisions.index(revision)
 

	
 
        self.nodes = {}
 
        self._paths = {}
 

	
 
    @LazyProperty
 
    def message(self):
 
        return safe_unicode(self._commit.message)
 

	
 
    @LazyProperty
 
    def committer(self):
 
        return safe_unicode(getattr(self._commit, self._committer_property))
 

	
 
    @LazyProperty
 
    def author(self):
 
        return safe_unicode(getattr(self._commit, self._author_property))
 

	
 
    @LazyProperty
 
    def date(self):
 
        return date_fromtimestamp(getattr(self._commit, self._date_property),
 
                                  getattr(self._commit, self._date_tz_property))
 

	
 
    @LazyProperty
 
    def _timestamp(self):
 
        return getattr(self._commit, self._date_property)
 

	
 
    @LazyProperty
 
    def status(self):
 
        """
 
        Returns modified, added, removed, deleted files for current changeset
 
        """
 
        return self.changed, self.added, self.removed
 

	
 
    @LazyProperty
 
    def tags(self):
 
        _tags = []
 
        for tname, tsha in self.repository.tags.iteritems():
 
            if tsha == self.raw_id:
 
                _tags.append(tname)
 
        return _tags
 

	
 
    @LazyProperty
 
    def branch(self):
 

	
 
        heads = self.repository._heads(reverse=False)
 

	
 
        ref = heads.get(self.raw_id)
 
        if ref:
 
            return safe_unicode(ref)
 

	
 
    def _fix_path(self, path):
 
        """
 
        Paths are stored without trailing slash so we need to get rid of it if
        needed.
 
        """
 
        if path.endswith('/'):
 
            path = path.rstrip('/')
 
        return path
 

	
 
    def _get_id_for_path(self, path):
 
        path = safe_str(path)
 
        # FIXME: Please, spare a couple of minutes and make those codes cleaner;
 
        if not path in self._paths:
 
            path = path.strip('/')
 
            # set root tree
 
            tree = self.repository._repo[self._tree_id]
 
            if path == '':
 
                self._paths[''] = tree.id
 
                return tree.id
 
            splitted = path.split('/')
 
            dirs, name = splitted[:-1], splitted[-1]
 
            curdir = ''
 

	
 
            # initially extract things from root dir
 
            for item, stat, id in tree.iteritems():
 
                if curdir:
 
                    name = '/'.join((curdir, item))
 
                else:
 
                    name = item
 
                self._paths[name] = id
 
                self._stat_modes[name] = stat
 

	
 
            for dir in dirs:
 
                if curdir:
 
                    curdir = '/'.join((curdir, dir))
 
                else:
 
                    curdir = dir
 
                dir_id = None
 
                for item, stat, id in tree.iteritems():
 
                    if dir == item:
 
                        dir_id = id
 
                if dir_id:
 
                    # Update tree
 
                    tree = self.repository._repo[dir_id]
 
                    if not isinstance(tree, objects.Tree):
 
                        raise ChangesetError('%s is not a directory' % curdir)
 
                else:
 
                    raise ChangesetError('%s has not been found' % curdir)
 

	
 
                # cache all items from the given traversed tree
 
                for item, stat, id in tree.iteritems():
 
                    if curdir:
 
                        name = '/'.join((curdir, item))
 
                    else:
 
                        name = item
 
                    self._paths[name] = id
 
                    self._stat_modes[name] = stat
 
            if not path in self._paths:
 
                raise NodeDoesNotExistError("There is no file nor directory "
 
                    "at the given path '%s' at revision %s"
 
                    % (path, safe_str(self.short_id)))
 
        return self._paths[path]
 

	
 
    def _get_kind(self, path):
 
        obj = self.repository._repo[self._get_id_for_path(path)]
 
        if isinstance(obj, objects.Blob):
 
            return NodeKind.FILE
 
        elif isinstance(obj, objects.Tree):
 
            return NodeKind.DIR
 

	
 
    def _get_filectx(self, path):
 
        path = self._fix_path(path)
 
        if self._get_kind(path) != NodeKind.FILE:
 
            raise ChangesetError("File does not exist for revision %s at "
 
                " '%s'" % (self.raw_id, path))
 
        return path
 

	
 
    def _get_file_nodes(self):
 
        return chain(*(t[2] for t in self.walk()))
 

	
 
    @LazyProperty
 
    def parents(self):
 
        """
 
        Returns list of parents changesets.
 
        """
 
        return [self.repository.get_changeset(parent)
 
                for parent in self._commit.parents]
 

	
 
    @LazyProperty
 
    def children(self):
 
        """
 
        Returns list of children changesets.
 
        """
 
        rev_filter = settings.GIT_REV_FILTER
 
        so, se = self.repository.run_git_command(
 
            ['rev-list', rev_filter, '--children']
 
        )
 

	
 
        children = []
 
        pat = re.compile(r'^%s' % self.raw_id)
 
        for l in so.splitlines():
 
            if pat.match(l):
 
                childs = l.split(' ')[1:]
 
                children.extend(childs)
 
        return [self.repository.get_changeset(cs) for cs in children]
 

	
 
    def next(self, branch=None):
 
        if branch and self.branch != branch:
 
            raise VCSError('Branch option used on changeset not belonging '
 
                           'to that branch')
 

	
 
        cs = self
 
        while True:
 
            try:
 
                next_ = cs.revision + 1
 
                next_rev = cs.repository.revisions[next_]
 
            except IndexError:
 
                raise ChangesetDoesNotExistError
 
            cs = cs.repository.get_changeset(next_rev)
 

	
 
            if not branch or branch == cs.branch:
 
                return cs
 

	
 
    def prev(self, branch=None):
 
        if branch and self.branch != branch:
 
            raise VCSError('Branch option used on changeset not belonging '
 
                           'to that branch')
 

	
 
        cs = self
 
        while True:
 
            try:
 
                prev_ = cs.revision - 1
 
                if prev_ < 0:
 
                    raise IndexError
 
                prev_rev = cs.repository.revisions[prev_]
 
            except IndexError:
 
                raise ChangesetDoesNotExistError
 
            cs = cs.repository.get_changeset(prev_rev)
 

	
 
            if not branch or branch == cs.branch:
 
                return cs
 

	
 
    def diff(self, ignore_whitespace=True, context=3):
 
        rev1 = self.parents[0] if self.parents else self.repository.EMPTY_CHANGESET
 
        rev2 = self
 
        return ''.join(self.repository.get_diff(rev1, rev2,
 
                                    ignore_whitespace=ignore_whitespace,
 
                                    context=context))
 

	
 
    def get_file_mode(self, path):
 
        """
 
        Returns stat mode of the file at the given ``path``.
 
        """
 
        # ensure path is traversed
 
        path = safe_str(path)
 
        self._get_id_for_path(path)
 
        return self._stat_modes[path]
 

	
 
    def get_file_content(self, path):
 
        """
 
        Returns content of the file at given ``path``.
 
        """
 
        id = self._get_id_for_path(path)
 
        blob = self.repository._repo[id]
 
        return blob.as_pretty_string()
 

	
 
    def get_file_size(self, path):
 
        """
 
        Returns size of the file at given ``path``.
 
        """
 
        id = self._get_id_for_path(path)
 
        blob = self.repository._repo[id]
 
        return blob.raw_length()
 

	
 
    def get_file_changeset(self, path):
 
        """
 
        Returns last commit of the file at the given ``path``.
 
        """
 
        return self.get_file_history(path, limit=1)[0]
 

	
 
    def get_file_history(self, path, limit=None):
 
        """
 
        Returns history of file as reversed list of ``Changeset`` objects for
 
        which file at given ``path`` has been modified.
 

	
 
        TODO: This function now uses the underlying OS 'git' and 'grep' commands,
        which is generally not good. It should be replaced with an algorithm
        iterating over commits.
 
        """
 
        self._get_filectx(path)
 
        cs_id = safe_str(self.id)
 
        f_path = safe_str(path)
 

	
 
-        if limit:
+        if limit is not None:
 
            cmd = ['log', '-n', str(safe_int(limit, 0)),
 
                   '--pretty=format:%H', '-s', cs_id, '--', f_path]
 

	
 
        else:
 
            cmd = ['log',
 
                   '--pretty=format:%H', '-s', cs_id, '--', f_path]
 
        so, se = self.repository.run_git_command(cmd)
 
        ids = re.findall(r'[0-9a-fA-F]{40}', so)
 
        return [self.repository.get_changeset(sha) for sha in ids]
 

	
 
    def get_file_history_2(self, path):
 
        """
 
        Returns history of file as reversed list of ``Changeset`` objects for
 
        which file at given ``path`` has been modified.
 

	
 
        """
 
        self._get_filectx(path)
 
        from dulwich.walk import Walker
 
        include = [self.id]
 
        walker = Walker(self.repository._repo.object_store, include,
 
                        paths=[path], max_entries=1)
 
        return [self.repository.get_changeset(sha)
 
                for sha in (x.commit.id for x in walker)]
 

	
 
    def get_file_annotate(self, path):
 
        """
 
        Returns a generator of four element tuples with
 
            lineno, sha, changeset lazy loader and line
 

	
 
        TODO: This function now uses the underlying OS 'git' command, which is
        generally not good. It should be replaced with an algorithm iterating
        over commits.
 
        """
 
        cmd = ['blame', '-l', '--root', '-r', self.id, '--', path]
 
        # -l     ==> outputs long shas (and we need all 40 characters)
 
        # --root ==> doesn't put '^' character for boundaries
 
        # -r sha ==> blames for the given revision
 
        so, se = self.repository.run_git_command(cmd)
 

	
 
        for i, blame_line in enumerate(so.split('\n')[:-1]):
 
            ln_no = i + 1
 
            sha, line = re.split(r' ', blame_line, 1)
 
            yield (ln_no, sha, lambda: self.repository.get_changeset(sha), line)
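
Illustration only (not part of the changeset): get_file_annotate above splits each "git blame -l --root -r" output line on the first space to separate the 40-character sha from the rest of the line. A self-contained sketch with a fabricated blame line:

# Sketch with fabricated blame output; mirrors the re.split() call above.
import re

blame_line = ('f' * 40) + ' (Author 2015-11-07 1) first line of the file'
sha, line = re.split(r' ', blame_line, 1)
print (1, sha[:7], line)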
 

	
 
    def fill_archive(self, stream=None, kind='tgz', prefix=None,
 
                     subrepos=False):
 
        """
 
        Fills up given stream.
 

	
 
        :param stream: file like object.
 
        :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``.
 
            Default: ``tgz``.
 
        :param prefix: name of root directory in archive.
 
            Default is repository name and changeset's raw_id joined with dash
 
            (``repo-tip.<KIND>``).
 
        :param subrepos: include subrepos in this archive.
 

	
 
        :raise ImproperArchiveTypeError: If given kind is wrong.
 
        :raise VcsError: If given stream is None
 

	
 
        """
 
        allowed_kinds = settings.ARCHIVE_SPECS.keys()
 
        if kind not in allowed_kinds:
 
            raise ImproperArchiveTypeError('Archive kind not supported, use one '
                'of %s', allowed_kinds)
 

	
 
        if prefix is None:
 
            prefix = '%s-%s' % (self.repository.name, self.short_id)
 
        elif prefix.startswith('/'):
 
            raise VCSError("Prefix cannot start with leading slash")
 
        elif prefix.strip() == '':
 
            raise VCSError("Prefix cannot be empty")
 

	
 
        if kind == 'zip':
 
            frmt = 'zip'
 
        else:
 
            frmt = 'tar'
 
        _git_path = settings.GIT_EXECUTABLE_PATH
 
        cmd = '%s archive --format=%s --prefix=%s/ %s' % (_git_path,
 
                                                frmt, prefix, self.raw_id)
 
        if kind == 'tgz':
 
            cmd += ' | gzip -9'
 
        elif kind == 'tbz2':
 
            cmd += ' | bzip2 -9'
 

	
 
        if stream is None:
 
            raise VCSError('You need to pass in a valid stream for filling'
 
                           ' with archival data')
 
        popen = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True,
 
                      cwd=self.repository.path)
 

	
 
        buffer_size = 1024 * 8
 
        chunk = popen.stdout.read(buffer_size)
 
        while chunk:
 
            stream.write(chunk)
 
            chunk = popen.stdout.read(buffer_size)
 
        # Make sure all descriptors are read
 
        popen.communicate()
 

	
 
    def get_nodes(self, path):
 
        if self._get_kind(path) != NodeKind.DIR:
 
            raise ChangesetError("Directory does not exist for revision %s at "
 
                " '%s'" % (self.revision, path))
 
        path = self._fix_path(path)
 
        id = self._get_id_for_path(path)
 
        tree = self.repository._repo[id]
 
        dirnodes = []
 
        filenodes = []
 
        als = self.repository.alias
 
        for name, stat, id in tree.iteritems():
 
            if objects.S_ISGITLINK(stat):
 
                dirnodes.append(SubModuleNode(name, url=None, changeset=id,
 
                                              alias=als))
 
                continue
 

	
 
            obj = self.repository._repo.get_object(id)
 
            if path != '':
 
                obj_path = '/'.join((path, name))
 
            else:
 
                obj_path = name
 
            if obj_path not in self._stat_modes:
 
                self._stat_modes[obj_path] = stat
 
            if isinstance(obj, objects.Tree):
 
                dirnodes.append(DirNode(obj_path, changeset=self))
 
            elif isinstance(obj, objects.Blob):
 
                filenodes.append(FileNode(obj_path, changeset=self, mode=stat))
 
            else:
 
                raise ChangesetError("Requested object should be Tree "
 
                                     "or Blob, is %r" % type(obj))
 
        nodes = dirnodes + filenodes
 
        for node in nodes:
 
            if not node.path in self.nodes:
 
                self.nodes[node.path] = node
 
        nodes.sort()
 
        return nodes
 

	
 
    def get_node(self, path):
 
        if isinstance(path, unicode):
 
            path = path.encode('utf-8')
 
        path = self._fix_path(path)
 
        if not path in self.nodes:
 
            try:
 
                id_ = self._get_id_for_path(path)
 
            except ChangesetError:
 
                raise NodeDoesNotExistError("Cannot find one of parents' "
 
                    "directories for a given path: %s" % path)
 

	
 
            _GL = lambda m: m and objects.S_ISGITLINK(m)
 
            if _GL(self._stat_modes.get(path)):
 
                node = SubModuleNode(path, url=None, changeset=id_,
 
                                     alias=self.repository.alias)
 
            else:
 
                obj = self.repository._repo.get_object(id_)
 

	
 
                if isinstance(obj, objects.Tree):
 
                    if path == '':
 
                        node = RootNode(changeset=self)
 
                    else:
 
                        node = DirNode(path, changeset=self)
 
                    node._tree = obj
 
                elif isinstance(obj, objects.Blob):
 
                    node = FileNode(path, changeset=self)
 
                    node._blob = obj
 
                else:
 
                    raise NodeDoesNotExistError("There is no file nor directory "
 
                        "at the given path '%s' at revision %s"
 
                        % (path, self.short_id))
 
            # cache node
 
            self.nodes[path] = node
 
        return self.nodes[path]
 

	
 
    @LazyProperty
 
    def affected_files(self):
 
        """
 
        Gets a fast-accessible list of file changes for the given changeset
 
        """
 
        added, modified, deleted = self._changes_cache
 
        return list(added.union(modified).union(deleted))
 

	
 
    @LazyProperty
 
    def _diff_name_status(self):
 
        output = []
 
        for parent in self.parents:
 
            cmd = ['diff', '--name-status', parent.raw_id, self.raw_id,
 
                   '--encoding=utf8']
 
            so, se = self.repository.run_git_command(cmd)
 
            output.append(so.strip())
 
        return '\n'.join(output)
 

	
 
    @LazyProperty
 
    def _changes_cache(self):
 
        added = set()
 
        modified = set()
 
        deleted = set()
 
        _r = self.repository._repo
 

	
 
        parents = self.parents
 
        if not self.parents:
 
            parents = [EmptyChangeset()]
 
        for parent in parents:
 
            if isinstance(parent, EmptyChangeset):
 
                oid = None
 
            else:
 
                oid = _r[parent.raw_id].tree
 
            changes = _r.object_store.tree_changes(oid, _r[self.raw_id].tree)
 
            for (oldpath, newpath), (_, _), (_, _) in changes:
 
                if newpath and oldpath:
 
                    modified.add(newpath)
 
                elif newpath and not oldpath:
 
                    added.add(newpath)
 
                elif not newpath and oldpath:
 
                    deleted.add(oldpath)
 
        return added, modified, deleted
 

	
 
    def _get_paths_for_status(self, status):
 
        """
 
        Returns sorted list of paths for given ``status``.
 

	
 
        :param status: one of: *added*, *modified* or *deleted*
 
        """
 
        added, modified, deleted = self._changes_cache
 
        return sorted({
 
            'added': list(added),
 
            'modified': list(modified),
 
            'deleted': list(deleted)}[status]
 
        )
 

	
 
    @LazyProperty
 
    def added(self):
 
        """
 
        Returns list of added ``FileNode`` objects.
 
        """
 
        if not self.parents:
 
            return list(self._get_file_nodes())
 
        return AddedFileNodesGenerator([n for n in
 
                                self._get_paths_for_status('added')], self)
 

	
 
    @LazyProperty
 
    def changed(self):
 
        """
 
        Returns list of modified ``FileNode`` objects.
 
        """
 
        if not self.parents:
 
            return []
 
        return ChangedFileNodesGenerator([n for n in
 
                                self._get_paths_for_status('modified')], self)
 

	
 
    @LazyProperty
 
    def removed(self):
 
        """
 
        Returns list of removed ``FileNode`` objects.
 
        """
 
        if not self.parents:
 
            return []
 
        return RemovedFileNodesGenerator([n for n in
 
                                self._get_paths_for_status('deleted')], self)
 

	
 
    extra = {}
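
Illustration only (not part of the changeset): the children property above parses "git rev-list --children" output, where each line is "<commit> <child1> <child2> ...". A self-contained sketch with fabricated shas:

# Sketch with fabricated shas; mirrors the parsing loop in GitChangeset.children.
import re

raw_id = 'a' * 40
rev_list_output = '\n'.join([
    'b' * 40,                       # unrelated commit, no children listed
    ('a' * 40) + ' ' + ('c' * 40),  # "our" commit followed by one child sha
])

children = []
pat = re.compile(r'^%s' % raw_id)
for l in rev_list_output.splitlines():
    if pat.match(l):
        children.extend(l.split(' ')[1:])
print children  # ['ccc...ccc']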
kallithea/lib/vcs/backends/hg/changeset.py
 
import os
 
import posixpath
 

	
 
from kallithea.lib.vcs.conf import settings
 
from kallithea.lib.vcs.backends.base import BaseChangeset
 
from kallithea.lib.vcs.exceptions import (
 
    ChangesetDoesNotExistError, ChangesetError, ImproperArchiveTypeError,
 
    NodeDoesNotExistError, VCSError
 
)
 
from kallithea.lib.vcs.nodes import (
 
    AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
 
    NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode
 
)
 
from kallithea.lib.vcs.utils import safe_str, safe_unicode, date_fromtimestamp
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.lib.vcs.utils.paths import get_dirs_for_path
 
from kallithea.lib.vcs.utils.hgcompat import archival, hex
 

	
 
from mercurial import obsolete
 

	
 
class MercurialChangeset(BaseChangeset):
 
    """
 
    Represents state of the repository at a single revision.
 
    """
 

	
 
    def __init__(self, repository, revision):
 
        self.repository = repository
 
        assert isinstance(revision, basestring), repr(revision)
 
        self.raw_id = revision
 
        self._ctx = repository._repo[revision]
 
        self.revision = self._ctx._rev
 
        self.nodes = {}
 

	
 
    @LazyProperty
 
    def tags(self):
 
        return map(safe_unicode, self._ctx.tags())
 

	
 
    @LazyProperty
 
    def branch(self):
 
        return  safe_unicode(self._ctx.branch())
 

	
 
    @LazyProperty
 
    def closesbranch(self):
 
        return  self._ctx.closesbranch()
 

	
 
    @LazyProperty
 
    def obsolete(self):
 
        return  self._ctx.obsolete()
 

	
 
    @LazyProperty
 
    def successors(self):
 
        successors = obsolete.successorssets(self._ctx._repo, self._ctx.node())
 
        if successors:
 
            # flattening the list here handles both divergent (len > 1)
            # and the usual case (len = 1)
 
            successors = [hex(n)[:12] for sub in successors for n in sub if n != self._ctx.node()]
 

	
 
        return successors
 

	
 
    @LazyProperty
 
    def precursors(self):
 
        precursors = set()
 
        nm = self._ctx._repo.changelog.nodemap
 
        for p in self._ctx._repo.obsstore.precursors.get(self._ctx.node(), ()):
 
            pr = nm.get(p[0])
 
            if pr is not None:
 
                precursors.add(hex(p[0])[:12])
 
        return precursors
 

	
 
    @LazyProperty
 
    def bookmarks(self):
 
        return map(safe_unicode, self._ctx.bookmarks())
 

	
 
    @LazyProperty
 
    def message(self):
 
        return safe_unicode(self._ctx.description())
 

	
 
    @LazyProperty
 
    def committer(self):
 
        return safe_unicode(self.author)
 

	
 
    @LazyProperty
 
    def author(self):
 
        return safe_unicode(self._ctx.user())
 

	
 
    @LazyProperty
 
    def date(self):
 
        return date_fromtimestamp(*self._ctx.date())
 

	
 
    @LazyProperty
 
    def _timestamp(self):
 
        return self._ctx.date()[0]
 

	
 
    @LazyProperty
 
    def status(self):
 
        """
 
        Returns modified, added, removed, deleted files for current changeset
 
        """
 
        return self.repository._repo.status(self._ctx.p1().node(),
 
                                            self._ctx.node())
 

	
 
    @LazyProperty
 
    def _file_paths(self):
 
        return list(self._ctx)
 

	
 
    @LazyProperty
 
    def _dir_paths(self):
 
        p = list(set(get_dirs_for_path(*self._file_paths)))
 
        p.insert(0, '')
 
        return p
 

	
 
    @LazyProperty
 
    def _paths(self):
 
        return self._dir_paths + self._file_paths
 

	
 
    @LazyProperty
 
    def id(self):
 
        if self.last:
 
            return u'tip'
 
        return self.short_id
 

	
 
    @LazyProperty
 
    def short_id(self):
 
        return self.raw_id[:12]
 

	
 
    @LazyProperty
 
    def parents(self):
 
        """
 
        Returns list of parents changesets.
 
        """
 
        return [self.repository.get_changeset(parent.rev())
 
                for parent in self._ctx.parents() if parent.rev() >= 0]
 

	
 
    @LazyProperty
 
    def children(self):
 
        """
 
        Returns list of children changesets.
 
        """
 
        return [self.repository.get_changeset(child.rev())
 
                for child in self._ctx.children() if child.rev() >= 0]
 

	
 
    def next(self, branch=None):
 
        if branch and self.branch != branch:
 
            raise VCSError('Branch option used on changeset not belonging '
 
                           'to that branch')
 

	
 
        cs = self
 
        while True:
 
            try:
 
                next_ = cs.revision + 1
 
                next_rev = cs.repository.revisions[next_]
 
            except IndexError:
 
                raise ChangesetDoesNotExistError
 
            cs = cs.repository.get_changeset(next_rev)
 

	
 
            if not branch or branch == cs.branch:
 
                return cs
 

	
 
    def prev(self, branch=None):
 
        if branch and self.branch != branch:
 
            raise VCSError('Branch option used on changeset not belonging '
 
                           'to that branch')
 

	
 
        cs = self
 
        while True:
 
            try:
 
                prev_ = cs.revision - 1
 
                if prev_ < 0:
 
                    raise IndexError
 
                prev_rev = cs.repository.revisions[prev_]
 
            except IndexError:
 
                raise ChangesetDoesNotExistError
 
            cs = cs.repository.get_changeset(prev_rev)
 

	
 
            if not branch or branch == cs.branch:
 
                return cs
 

	
 
    def diff(self, ignore_whitespace=True, context=3):
 
        return ''.join(self._ctx.diff(git=True,
 
                                      ignore_whitespace=ignore_whitespace,
 
                                      context=context))
 

	
 
    def _fix_path(self, path):
 
        """
 
        Paths are stored without trailing slash so we need to get rid of it if
        needed. Also mercurial keeps filenodes as str so we need to decode
        from unicode to str
 
        """
 
        if path.endswith('/'):
 
            path = path.rstrip('/')
 

	
 
        return safe_str(path)
 

	
 
    def _get_kind(self, path):
 
        path = self._fix_path(path)
 
        if path in self._file_paths:
 
            return NodeKind.FILE
 
        elif path in self._dir_paths:
 
            return NodeKind.DIR
 
        else:
 
            raise ChangesetError("Node does not exist at the given path '%s'"
 
                % (path))
 

	
 
    def _get_filectx(self, path):
 
        path = self._fix_path(path)
 
        if self._get_kind(path) != NodeKind.FILE:
 
            raise ChangesetError("File does not exist for revision %s at "
 
                " '%s'" % (self.raw_id, path))
 
        return self._ctx.filectx(path)
 

	
 
    def _extract_submodules(self):
 
        """
 
        returns a dictionary with submodule information from substate file
 
        of hg repository
 
        """
 
        return self._ctx.substate
 

	
 
    def get_file_mode(self, path):
 
        """
 
        Returns stat mode of the file at the given ``path``.
 
        """
 
        fctx = self._get_filectx(path)
 
        if 'x' in fctx.flags():
 
            return 0100755
 
        else:
 
            return 0100644
 

	
 
    def get_file_content(self, path):
 
        """
 
        Returns content of the file at given ``path``.
 
        """
 
        fctx = self._get_filectx(path)
 
        return fctx.data()
 

	
 
    def get_file_size(self, path):
 
        """
 
        Returns size of the file at given ``path``.
 
        """
 
        fctx = self._get_filectx(path)
 
        return fctx.size()
 

	
 
    def get_file_changeset(self, path):
 
        """
 
        Returns last commit of the file at the given ``path``.
 
        """
 
        return self.get_file_history(path, limit=1)[0]
 

	
 
    def get_file_history(self, path, limit=None):
 
        """
 
        Returns history of file as reversed list of ``Changeset`` objects for
 
        which file at given ``path`` has been modified.
 
        """
 
        fctx = self._get_filectx(path)
 
        hist = []
 
        cnt = 0
 
        for cs in reversed([x for x in fctx.filelog()]):
 
            cnt += 1
 
            hist.append(hex(fctx.filectx(cs).node()))
 
-            if limit and cnt == limit:
+            if limit is not None and cnt == limit:
 
                break
 

	
 
        return [self.repository.get_changeset(node) for node in hist]
 

	
 
    def get_file_annotate(self, path):
 
        """
 
        Returns a generator of four element tuples with
 
            lineno, sha, changeset lazy loader and line
 
        """
 

	
 
        fctx = self._get_filectx(path)
 
        for i, annotate_data in enumerate(fctx.annotate()):
 
            ln_no = i + 1
 
            sha = hex(annotate_data[0].node())
 
            yield (ln_no, sha, lambda: self.repository.get_changeset(sha), annotate_data[1],)
 

	
 
    def fill_archive(self, stream=None, kind='tgz', prefix=None,
 
                     subrepos=False):
 
        """
 
        Fills up given stream.
 

	
 
        :param stream: file like object.
 
        :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``.
 
            Default: ``tgz``.
 
        :param prefix: name of root directory in archive.
 
            Default is repository name and changeset's raw_id joined with dash
 
            (``repo-tip.<KIND>``).
 
        :param subrepos: include subrepos in this archive.
 

	
 
        :raise ImproperArchiveTypeError: If given kind is wrong.
 
        :raise VcsError: If given stream is None
 
        """
 

	
 
        allowed_kinds = settings.ARCHIVE_SPECS.keys()
 
        if kind not in allowed_kinds:
 
            raise ImproperArchiveTypeError('Archive kind not supported, use one '
                'of %s', allowed_kinds)
 

	
 
        if stream is None:
 
            raise VCSError('You need to pass in a valid stream for filling'
 
                           ' with archival data')
 

	
 
        if prefix is None:
 
            prefix = '%s-%s' % (self.repository.name, self.short_id)
 
        elif prefix.startswith('/'):
 
            raise VCSError("Prefix cannot start with leading slash")
 
        elif prefix.strip() == '':
 
            raise VCSError("Prefix cannot be empty")
 

	
 
        archival.archive(self.repository._repo, stream, self.raw_id,
 
                         kind, prefix=prefix, subrepos=subrepos)
 

	
 
    def get_nodes(self, path):
 
        """
 
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        state of changeset at the given ``path``. If node at the given ``path``
        is not an instance of ``DirNode``, a ``ChangesetError`` is raised.
 
        """
 

	
 
        if self._get_kind(path) != NodeKind.DIR:
 
            raise ChangesetError("Directory does not exist for revision %s at "
 
                " '%s'" % (self.revision, path))
 
        path = self._fix_path(path)
 

	
 
        filenodes = [FileNode(f, changeset=self) for f in self._file_paths
 
            if os.path.dirname(f) == path]
 
        dirs = path == '' and '' or [d for d in self._dir_paths
 
            if d and posixpath.dirname(d) == path]
 
        dirnodes = [DirNode(d, changeset=self) for d in dirs
 
            if os.path.dirname(d) == path]
 

	
 
        als = self.repository.alias
 
        for k, vals in self._extract_submodules().iteritems():
 
            #vals = url,rev,type
 
            loc = vals[0]
 
            cs = vals[1]
 
            dirnodes.append(SubModuleNode(k, url=loc, changeset=cs,
 
                                          alias=als))
 
        nodes = dirnodes + filenodes
 
        # cache nodes
 
        for node in nodes:
 
            self.nodes[node.path] = node
 
        nodes.sort()
 

	
 
        return nodes
 

	
 
    def get_node(self, path):
 
        """
 
        Returns ``Node`` object from the given ``path``. If there is no node at
 
        the given ``path``, ``ChangesetError`` would be raised.
 
        """
 

	
 
        path = self._fix_path(path)
 

	
 
        if not path in self.nodes:
 
            if path in self._file_paths:
 
                node = FileNode(path, changeset=self)
 
            elif path in self._dir_paths:
 
                if path == '':
 
                    node = RootNode(changeset=self)
 
                else:
 
                    node = DirNode(path, changeset=self)
 
            else:
 
                raise NodeDoesNotExistError("There is no file nor directory "
 
                    "at the given path: '%s' at revision %s"
 
                    % (path, self.short_id))
 
            # cache node
 
            self.nodes[path] = node
 
        return self.nodes[path]
 

	
 
    @LazyProperty
 
    def affected_files(self):
 
        """
 
        Gets a fast-accessible list of file changes for the given changeset
 
        """
 
        return self._ctx.files()
 

	
 
    @property
 
    def added(self):
 
        """
 
        Returns list of added ``FileNode`` objects.
 
        """
 
        return AddedFileNodesGenerator([n for n in self.status[1]], self)
 

	
 
    @property
 
    def changed(self):
 
        """
 
        Returns list of modified ``FileNode`` objects.
 
        """
 
        return ChangedFileNodesGenerator([n for n in  self.status[0]], self)
 

	
 
    @property
 
    def removed(self):
 
        """
 
        Returns list of removed ``FileNode`` objects.
 
        """
 
        return RemovedFileNodesGenerator([n for n in self.status[2]], self)
 

	
 
    @LazyProperty
 
    def extra(self):
 
        return self._ctx.extra()
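
Illustration only (not part of the changeset): MercurialChangeset.get_file_mode above maps Mercurial's file flags to stat modes, so executable files ('x' in the flags) report 0100755 and everything else 0100644. A minimal sketch using the same Python 2 octal literals as the code above:

# Minimal sketch of the flag-to-mode mapping used by get_file_mode.
def mode_from_flags(flags):
    if 'x' in flags:
        return 0100755
    return 0100644

print oct(mode_from_flags('x'))  # 0100755
print oct(mode_from_flags(''))   # 0100644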
kallithea/model/repo.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.repo
 
~~~~~~~~~~~~~~~~~~~~
 

	
 
Repository model for kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jun 5, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 

	
 
"""
 

	
 
import os
 
import shutil
 
import logging
 
import traceback
 
from datetime import datetime
 
from sqlalchemy.orm import subqueryload
 

	
 
from kallithea.lib.utils import make_ui
 
from kallithea.lib.vcs.backends import get_backend
 
from kallithea.lib.compat import json
 
from kallithea.lib.utils2 import LazyProperty, safe_str, safe_unicode, \
 
    remove_prefix, obfuscate_url_pw, get_current_authuser
 
from kallithea.lib.caching_query import FromCache
 
from kallithea.lib.hooks import log_delete_repository
 

	
 
from kallithea.model import BaseModel
 
from kallithea.model.db import Repository, UserRepoToPerm, UserGroupRepoToPerm, \
 
    UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission, \
 
    Statistics, UserGroup, Ui, RepoGroup, RepositoryField
 

	
 
from kallithea.lib import helpers as h
 
from kallithea.lib.auth import HasRepoPermissionAny, HasUserGroupPermissionAny
 
from kallithea.lib.exceptions import AttachedForksError
 
from kallithea.model.scm import UserGroupList
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class RepoModel(BaseModel):
 

	
 
    cls = Repository
 
    URL_SEPARATOR = Repository.url_sep()
 

	
 
    def _get_user_group(self, users_group):
 
        return self._get_instance(UserGroup, users_group,
 
                                  callback=UserGroup.get_by_group_name)
 

	
 
    def _get_repo_group(self, repo_group):
 
        return self._get_instance(RepoGroup, repo_group,
 
                                  callback=RepoGroup.get_by_group_name)
 

	
 
    def _create_default_perms(self, repository, private):
 
        # create default permission
 
        default = 'repository.read'
 
        def_user = User.get_default_user()
 
        for p in def_user.user_perms:
 
            if p.permission.permission_name.startswith('repository.'):
 
                default = p.permission.permission_name
 
                break
 

	
 
        default_perm = 'repository.none' if private else default
 

	
 
        repo_to_perm = UserRepoToPerm()
 
        repo_to_perm.permission = Permission.get_by_key(default_perm)
 

	
 
        repo_to_perm.repository = repository
 
        repo_to_perm.user_id = def_user.user_id
 

	
 
        return repo_to_perm
 

	
 
    @LazyProperty
 
    def repos_path(self):
 
        """
 
        Gets the repositories root path from database
 
        """
 

	
 
        q = self.sa.query(Ui).filter(Ui.ui_key == '/').one()
 
        return q.ui_value
 

	
 
    def get(self, repo_id, cache=False):
 
        repo = self.sa.query(Repository) \
 
            .filter(Repository.repo_id == repo_id)
 

	
 
        if cache:
 
            repo = repo.options(FromCache("sql_cache_short",
 
                                          "get_repo_%s" % repo_id))
 
        return repo.scalar()
 

	
 
    def get_repo(self, repository):
 
        return self._get_repo(repository)
 

	
 
    def get_by_repo_name(self, repo_name, cache=False):
 
        repo = self.sa.query(Repository) \
 
            .filter(Repository.repo_name == repo_name)
 

	
 
        if cache:
 
            repo = repo.options(FromCache("sql_cache_short",
 
                                          "get_repo_%s" % repo_name))
 
        return repo.scalar()
 

	
 
    def get_all_user_repos(self, user):
 
        """
 
        Gets all repositories that the user has at least read access to
 

	
 
        :param user:
 
        """
 
        from kallithea.lib.auth import AuthUser
 
        user = self._get_user(user)
 
        repos = AuthUser(user_id=user.user_id).permissions['repositories']
 
        access_check = lambda r: r[1] in ['repository.read',
 
                                          'repository.write',
 
                                          'repository.admin']
 
        repos = [x[0] for x in filter(access_check, repos.items())]
 
        return Repository.query().filter(Repository.repo_name.in_(repos))
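
Illustration only (not part of the changeset): get_all_user_repos above keeps only repositories for which the AuthUser permission map grants at least read access. A self-contained sketch with a fabricated permissions dict:

# Sketch with fabricated permission data; mirrors the access_check filter above.
permissions = {
    'repo-a': 'repository.read',
    'repo-b': 'repository.none',
    'repo-c': 'repository.admin',
}
access_check = lambda r: r[1] in ['repository.read',
                                  'repository.write',
                                  'repository.admin']
repos = [x[0] for x in filter(access_check, permissions.items())]
print sorted(repos)  # ['repo-a', 'repo-c']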
 

	
 
    def get_users_js(self):
 
        users = self.sa.query(User).filter(User.active == True).all()
 
        return json.dumps([
 
            {
 
                'id': u.user_id,
 
                'fname': h.escape(u.name),
 
                'lname': h.escape(u.lastname),
 
                'nname': u.username,
 
                'gravatar_lnk': h.gravatar_url(u.email, size=28),
 
                'gravatar_size': 14,
 
            } for u in users]
 
        )
 

	
 
    def get_user_groups_js(self):
 
        user_groups = self.sa.query(UserGroup) \
 
            .filter(UserGroup.users_group_active == True) \
 
            .options(subqueryload(UserGroup.members)) \
 
            .all()
 
        user_groups = UserGroupList(user_groups, perm_set=['usergroup.read',
 
                                                           'usergroup.write',
 
                                                           'usergroup.admin'])
 
        return json.dumps([
 
            {
 
                'id': gr.users_group_id,
 
                'grname': gr.users_group_name,
 
                'grmembers': len(gr.members),
 
            } for gr in user_groups]
 
        )
 

	
 
    @classmethod
 
    def _render_datatable(cls, tmpl, *args, **kwargs):
 
        import kallithea
 
        from pylons import tmpl_context as c
 
        from pylons.i18n.translation import _
 

	
 
        _tmpl_lookup = kallithea.CONFIG['pylons.app_globals'].mako_lookup
 
        template = _tmpl_lookup.get_template('data_table/_dt_elements.html')
 

	
 
        tmpl = template.get_def(tmpl)
 
        kwargs.update(dict(_=_, h=h, c=c))
 
        return tmpl.render(*args, **kwargs)
 

	
 
    @classmethod
 
    def update_repoinfo(cls, repositories=None):
 
-        if not repositories:
+        if repositories is None:
 
            repositories = Repository.getAll()
 
        for repo in repositories:
 
            repo.update_changeset_cache()
 

	
 
    def get_repos_as_dict(self, repos_list=None, admin=False, perm_check=True,
 
                          super_user_actions=False):
 
        _render = self._render_datatable
 
        from pylons import tmpl_context as c
 

	
 
        def quick_menu(repo_name):
 
            return _render('quick_menu', repo_name)
 

	
 
        def repo_lnk(name, rtype, rstate, private, fork_of):
 
            return _render('repo_name', name, rtype, rstate, private, fork_of,
 
                           short_name=not admin, admin=False)
 

	
 
        def last_change(last_change):
 
            return _render("last_change", last_change)
 

	
 
        def rss_lnk(repo_name):
 
            return _render("rss", repo_name)
 

	
 
        def atom_lnk(repo_name):
 
            return _render("atom", repo_name)
 

	
 
        def last_rev(repo_name, cs_cache):
 
            return _render('revision', repo_name, cs_cache.get('revision'),
 
                           cs_cache.get('raw_id'), cs_cache.get('author'),
 
                           cs_cache.get('message'))
 

	
 
        def desc(desc):
 
            return h.urlify_text(desc, truncate=60, stylize=c.visual.stylify_metatags)
 

	
 
        def state(repo_state):
 
            return _render("repo_state", repo_state)
 

	
 
        def repo_actions(repo_name):
 
            return _render('repo_actions', repo_name, super_user_actions)
 

	
 
        def owner_actions(user_id, username):
 
            return _render('user_name', user_id, username)
 

	
 
        repos_data = []
 
        for repo in repos_list:
 
            if perm_check:
 
                # check permission at this level
 
                if not HasRepoPermissionAny(
 
                        'repository.read', 'repository.write',
 
                        'repository.admin'
 
                )(repo.repo_name, 'get_repos_as_dict check'):
 
                    continue
 
            cs_cache = repo.changeset_cache
 
            row = {
 
                "menu": quick_menu(repo.repo_name),
 
                "raw_name": repo.repo_name.lower(),
 
                "name": repo_lnk(repo.repo_name, repo.repo_type,
 
                                 repo.repo_state, repo.private, repo.fork),
 
                "last_change": last_change(repo.last_db_change),
 
                "last_changeset": last_rev(repo.repo_name, cs_cache),
 
                "last_rev_raw": cs_cache.get('revision'),
 
                "desc": desc(repo.description),
 
                "owner": h.person(repo.user),
 
                "state": state(repo.repo_state),
 
                "rss": rss_lnk(repo.repo_name),
 
                "atom": atom_lnk(repo.repo_name),
 

	
 
            }
 
            if admin:
 
                row.update({
 
                    "action": repo_actions(repo.repo_name),
 
                    "owner": owner_actions(repo.user.user_id,
 
                                           h.person(repo.user))
 
                })
 
            repos_data.append(row)
 

	
 
        return {
 
            "totalRecords": len(repos_list),
 
            "startIndex": 0,
 
            "sort": "name",
 
            "dir": "asc",
 
            "records": repos_data
 
        }
 

	
 
    def _get_defaults(self, repo_name):
 
        """
 
        Gets information about repository, and returns a dict for
 
        usage in forms
 

	
 
        :param repo_name:
 
        """
 

	
 
        repo_info = Repository.get_by_repo_name(repo_name)
 

	
 
        if repo_info is None:
 
            return None
 

	
 
        defaults = repo_info.get_dict()
 
        group, repo_name, repo_name_full = repo_info.groups_and_repo
 
        defaults['repo_name'] = repo_name
 
        defaults['repo_group'] = getattr(group[-1] if group else None,
 
                                         'group_id', None)
 

	
 
        for strip, k in [(0, 'repo_type'), (1, 'repo_enable_downloads'),
 
                         (1, 'repo_description'), (1, 'repo_enable_locking'),
 
                         (1, 'repo_landing_rev'), (0, 'clone_uri'),
 
                         (1, 'repo_private'), (1, 'repo_enable_statistics')]:
 
            attr = k
 
            if strip:
 
                attr = remove_prefix(k, 'repo_')
 

	
 
            val = defaults[attr]
 
            if k == 'repo_landing_rev':
 
                val = ':'.join(defaults[attr])
 
            defaults[k] = val
 
            if k == 'clone_uri':
 
                defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
 

	
 
        # fill owner
 
        if repo_info.user:
 
            defaults.update({'user': repo_info.user.username})
 
        else:
 
            replacement_user = User.query().filter(User.admin ==
 
                                                   True).first().username
 
            defaults.update({'user': replacement_user})
 

	
 
        # fill repository users
 
        for p in repo_info.repo_to_perm:
 
            defaults.update({'u_perm_%s' % p.user.username:
 
                                 p.permission.permission_name})
 

	
 
        # fill repository groups
 
        for p in repo_info.users_group_to_perm:
 
            defaults.update({'g_perm_%s' % p.users_group.users_group_name:
 
                                 p.permission.permission_name})
 

	
 
        return defaults
 

	
 
    def update(self, repo, **kwargs):
 
        try:
 
            cur_repo = self._get_repo(repo)
 
            org_repo_name = cur_repo.repo_name
 
            if 'user' in kwargs:
 
                cur_repo.user = User.get_by_username(kwargs['user'])
 

	
 
            if 'repo_group' in kwargs:
 
                cur_repo.group = RepoGroup.get(kwargs['repo_group'])
 
            log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
 
            for k in ['repo_enable_downloads',
 
                      'repo_description',
 
                      'repo_enable_locking',
 
                      'repo_landing_rev',
 
                      'repo_private',
 
                      'repo_enable_statistics',
 
                      ]:
 
                if k in kwargs:
 
                    setattr(cur_repo, remove_prefix(k, 'repo_'), kwargs[k])
 
            clone_uri = kwargs.get('clone_uri')
 
            if clone_uri is not None and clone_uri != cur_repo.clone_uri_hidden:
 
                cur_repo.clone_uri = clone_uri
 

	
 
            new_name = cur_repo.get_new_name(kwargs['repo_name'])
 
            cur_repo.repo_name = new_name
 
            #if private flag is set, reset default permission to NONE
 

	
 
            if kwargs.get('repo_private'):
 
                EMPTY_PERM = 'repository.none'
 
                RepoModel().grant_user_permission(
 
                    repo=cur_repo, user='default', perm=EMPTY_PERM
 
                )
 
            # handle extra fields
 
            for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
 
                                kwargs):
 
                k = RepositoryField.un_prefix_key(field)
 
                ex_field = RepositoryField.get_by_key_name(key=k, repo=cur_repo)
 
                if ex_field:
 
                    ex_field.field_value = kwargs[field]
 
                    self.sa.add(ex_field)
 
            self.sa.add(cur_repo)
 

	
 
            if org_repo_name != new_name:
 
                # rename repository
 
                self._rename_filesystem_repo(old=org_repo_name, new=new_name)
 

	
 
            return cur_repo
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
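    # Illustrative sketch, not part of the original changeset: a typical call,
    # with kwargs matching the keys consumed above ('repo_name' is required);
    # the repository and user names are made up, and committing the session is
    # assumed to be the caller's responsibility:
    #   RepoModel().update('myrepo', repo_name='myrepo', user='jdoe',
    #                      repo_description='New description', repo_private=True)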
 

	
 
    def _create_repo(self, repo_name, repo_type, description, owner,
 
                     private=False, clone_uri=None, repo_group=None,
 
                     landing_rev='rev:tip', fork_of=None,
 
                     copy_fork_permissions=False, enable_statistics=False,
 
                     enable_locking=False, enable_downloads=False,
 
                     copy_group_permissions=False, state=Repository.STATE_PENDING):
 
        """
 
        Create a repository inside the database with PENDING state. This should only be
 
        executed by create(), with the exception of importing existing repos.
 

	
 
        """
 
        from kallithea.model.scm import ScmModel
 

	
 
        owner = self._get_user(owner)
 
        fork_of = self._get_repo(fork_of)
 
        repo_group = self._get_repo_group(repo_group)
 
        try:
 
            repo_name = safe_unicode(repo_name)
 
            description = safe_unicode(description)
 
            # repo name is just a name of repository
 
            # while repo_name_full is a full qualified name that is combined
 
            # with name and path of group
 
            repo_name_full = repo_name
 
            repo_name = repo_name.split(self.URL_SEPARATOR)[-1]
 

	
 
            new_repo = Repository()
 
            new_repo.repo_state = state
 
            new_repo.enable_statistics = False
 
            new_repo.repo_name = repo_name_full
 
            new_repo.repo_type = repo_type
 
            new_repo.user = owner
 
            new_repo.group = repo_group
 
            new_repo.description = description or repo_name
 
            new_repo.private = private
 
            new_repo.clone_uri = clone_uri
 
            new_repo.landing_rev = landing_rev
 

	
 
            new_repo.enable_statistics = enable_statistics
 
            new_repo.enable_locking = enable_locking
 
            new_repo.enable_downloads = enable_downloads
 

	
 
            if repo_group:
 
                new_repo.enable_locking = repo_group.enable_locking
 

	
 
            if fork_of:
 
                parent_repo = fork_of
 
                new_repo.fork = parent_repo
 

	
 
            self.sa.add(new_repo)
 

	
 
            if fork_of and copy_fork_permissions:
 
                repo = fork_of
 
                user_perms = UserRepoToPerm.query() \
 
                    .filter(UserRepoToPerm.repository == repo).all()
 
                group_perms = UserGroupRepoToPerm.query() \
 
                    .filter(UserGroupRepoToPerm.repository == repo).all()
 

	
 
                for perm in user_perms:
 
                    UserRepoToPerm.create(perm.user, new_repo, perm.permission)
 

	
 
                for perm in group_perms:
 
                    UserGroupRepoToPerm.create(perm.users_group, new_repo,
 
                                               perm.permission)
 

	
 
            elif repo_group and copy_group_permissions:
 

	
 
                user_perms = UserRepoGroupToPerm.query() \
 
                    .filter(UserRepoGroupToPerm.group == repo_group).all()
 

	
 
                group_perms = UserGroupRepoGroupToPerm.query() \
 
                    .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
 

	
 
                for perm in user_perms:
 
                    perm_name = perm.permission.permission_name.replace('group.', 'repository.')
 
                    perm_obj = Permission.get_by_key(perm_name)
 
                    UserRepoToPerm.create(perm.user, new_repo, perm_obj)
 

	
 
                for perm in group_perms:
 
                    perm_name = perm.permission.permission_name.replace('group.', 'repository.')
 
                    perm_obj = Permission.get_by_key(perm_name)
 
                    UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
 

	
 
            else:
 
                perm_obj = self._create_default_perms(new_repo, private)
 
                self.sa.add(perm_obj)
 

	
 
            # now automatically start following this repository as owner
 
            ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
 
                                                    owner.user_id)
 
            # we need to flush here to check that the database won't
 
            # throw any exceptions; filesystem dirs are created at the very end
 
            self.sa.flush()
 
            return new_repo
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def create(self, form_data, cur_user):
 
        """
 
        Create repository using celery tasks
 

	
 
        :param form_data:
 
        :param cur_user:
 
        """
 
        from kallithea.lib.celerylib import tasks, run_task
 
        return run_task(tasks.create_repo, form_data, cur_user)
 

	
 
    def _update_permissions(self, repo, perms_new=None, perms_updates=None,
 
                            check_perms=True):
 
        if not perms_new:
 
            perms_new = []
 
        if not perms_updates:
 
            perms_updates = []
 

	
 
        # update permissions
 
        for member, perm, member_type in perms_updates:
 
            if member_type == 'user':
 
                # this updates existing one
 
                self.grant_user_permission(
 
                    repo=repo, user=member, perm=perm
 
                )
 
            else:
 
                #check if we have permissions to alter this usergroup
 
                req_perms = (
 
                    'usergroup.read', 'usergroup.write', 'usergroup.admin')
 
                if not check_perms or HasUserGroupPermissionAny(*req_perms)(
 
                        member):
 
                    self.grant_user_group_permission(
 
                        repo=repo, group_name=member, perm=perm
 
                    )
 
        # set new permissions
 
        for member, perm, member_type in perms_new:
 
            if member_type == 'user':
 
                self.grant_user_permission(
 
                    repo=repo, user=member, perm=perm
 
                )
 
            else:
 
                #check if we have permissions to alter this usergroup
 
                req_perms = (
 
                    'usergroup.read', 'usergroup.write', 'usergroup.admin')
 
                if not check_perms or HasUserGroupPermissionAny(*req_perms)(
 
                        member):
 
                    self.grant_user_group_permission(
 
                        repo=repo, group_name=member, perm=perm
 
                    )
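    # Illustrative sketch, not part of the original changeset: both lists hold
    # (member, permission, member_type) tuples, e.g.
    #   [('jdoe', 'repository.write', 'user')]
    # where member_type 'user' grants to a user and any other value is treated
    # as a user group name; 'jdoe' is made up for the example.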
 

	
 
    def create_fork(self, form_data, cur_user):
 
        """
 
        Simple wrapper into executing celery task for fork creation
 

	
 
        :param form_data:
 
        :param cur_user:
 
        """
 
        from kallithea.lib.celerylib import tasks, run_task
 
        return run_task(tasks.create_repo_fork, form_data, cur_user)
 

	
 
    def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
 
        """
 
        Delete the given repository; the forks parameter defines what to do with
 
        attached forks. Throws AttachedForksError if the deleted repo has attached
 
        forks.
 

	
 
        :param repo:
 
        :param forks: str 'delete' or 'detach'
 
        :param fs_remove: remove (archive) repo from filesystem
 
        """
 
        if not cur_user:
 
            cur_user = getattr(get_current_authuser(), 'username', None)
 
        repo = self._get_repo(repo)
 
        if repo is not None:
 
            if forks == 'detach':
 
                for r in repo.forks:
 
                    r.fork = None
 
                    self.sa.add(r)
 
            elif forks == 'delete':
 
                for r in repo.forks:
 
                    self.delete(r, forks='delete')
 
            elif [f for f in repo.forks]:
 
                raise AttachedForksError()
 

	
 
            old_repo_dict = repo.get_dict()
 
            try:
 
                self.sa.delete(repo)
 
                if fs_remove:
 
                    self._delete_filesystem_repo(repo)
 
                else:
 
                    log.debug('skipping removal from filesystem')
 
                log_delete_repository(old_repo_dict,
 
                                      deleted_by=cur_user)
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                raise
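    # Illustrative sketch, not part of the original changeset: detaching forks
    # before removing a (made-up) repository; with forks=None the call raises
    # AttachedForksError if any forks exist:
    #   RepoModel().delete('myrepo', forks='detach')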
 

	
 
    def grant_user_permission(self, repo, user, perm):
 
        """
 
        Grant permission for user on given repository, or update existing one
 
        if found
 

	
 
        :param repo: Instance of Repository, repository_id, or repository name
 
        :param user: Instance of User, user_id or username
 
        :param perm: Instance of Permission, or permission_name
 
        """
 
        user = self._get_user(user)
 
        repo = self._get_repo(repo)
 
        permission = self._get_perm(perm)
 

	
 
        # check if we have that permission already
 
        obj = self.sa.query(UserRepoToPerm) \
 
            .filter(UserRepoToPerm.user == user) \
 
            .filter(UserRepoToPerm.repository == repo) \
 
            .scalar()
 
        if obj is None:
 
            # create new !
 
            obj = UserRepoToPerm()
 
        obj.repository = repo
 
        obj.user = user
 
        obj.permission = permission
 
        self.sa.add(obj)
 
        log.debug('Granted perm %s to %s on %s', perm, user, repo)
 
        return obj
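    # Illustrative sketch, not part of the original changeset: granting write
    # access to a (made-up) user, mirroring the 'default' user call in update():
    #   RepoModel().grant_user_permission(repo='myrepo', user='jdoe',
    #                                     perm='repository.write')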
 

	
 
    def revoke_user_permission(self, repo, user):
 
        """
 
        Revoke permission for user on given repository
 

	
 
        :param repo: Instance of Repository, repository_id, or repository name
 
        :param user: Instance of User, user_id or username
 
        """
 

	
 
        user = self._get_user(user)
 
        repo = self._get_repo(repo)
 

	
 
        obj = self.sa.query(UserRepoToPerm) \
 
            .filter(UserRepoToPerm.repository == repo) \
 
            .filter(UserRepoToPerm.user == user) \
 
            .scalar()
 
        if obj is not None:
 
            self.sa.delete(obj)
 
            log.debug('Revoked perm on %s on %s', repo, user)
 

	
 
    def grant_user_group_permission(self, repo, group_name, perm):
 
        """
 
        Grant permission for user group on given repository, or update
 
        existing one if found
 

	
 
        :param repo: Instance of Repository, repository_id, or repository name
 
        :param group_name: Instance of UserGroup, users_group_id,
 
            or user group name
 
        :param perm: Instance of Permission, or permission_name
 
        """
 
        repo = self._get_repo(repo)
 
        group_name = self._get_user_group(group_name)
 
        permission = self._get_perm(perm)
 

	
 
        # check if we have that permission already
 
        obj = self.sa.query(UserGroupRepoToPerm) \
 
            .filter(UserGroupRepoToPerm.users_group == group_name) \
 
            .filter(UserGroupRepoToPerm.repository == repo) \
 
            .scalar()
 

	
 
        if obj is None:
 
            # create new
 
            obj = UserGroupRepoToPerm()
 

	
 
        obj.repository = repo
 
        obj.users_group = group_name
 
        obj.permission = permission
 
        self.sa.add(obj)
 
        log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
 
        return obj
 

	
 
    def revoke_user_group_permission(self, repo, group_name):
 
        """
 
        Revoke permission for user group on given repository
 

	
 
        :param repo: Instance of Repository, repository_id, or repository name
 
        :param group_name: Instance of UserGroup, users_group_id,
 
            or user group name
 
        """
 
        repo = self._get_repo(repo)
 
        group_name = self._get_user_group(group_name)
 

	
 
        obj = self.sa.query(UserGroupRepoToPerm) \
 
            .filter(UserGroupRepoToPerm.repository == repo) \
 
            .filter(UserGroupRepoToPerm.users_group == group_name) \
 
            .scalar()
 
        if obj is not None:
 
            self.sa.delete(obj)
 
            log.debug('Revoked perm to %s on %s', repo, group_name)
 

	
 
    def delete_stats(self, repo_name):
 
        """
 
        removes stats for given repo
 

	
 
        :param repo_name:
 
        """
 
        repo = self._get_repo(repo_name)
 
        try:
 
            obj = self.sa.query(Statistics) \
 
                .filter(Statistics.repository == repo).scalar()
 
            if obj is not None:
 
                self.sa.delete(obj)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
 
                                clone_uri=None, repo_store_location=None):
 
        """
 
        Makes a repository on the filesystem. The operation is group aware, meaning that it will create
 
        a repository within a group, and alter the paths according to the group location.
 

	
 
        :param repo_name:
 
        :param repo_type:
 
        :param repo_group:
 
        :param clone_uri:
 
        :param repo_store_location:
 
        """
 
        from kallithea.lib.utils import is_valid_repo, is_valid_repo_group
 
        from kallithea.model.scm import ScmModel
 

	
 
        if '/' in repo_name:
 
            raise ValueError('repo_name must not contain groups, got `%s`' % repo_name)
 

	
 
        if isinstance(repo_group, RepoGroup):
 
            new_parent_path = os.sep.join(repo_group.full_path_splitted)
 
        else:
 
            new_parent_path = repo_group or ''
 

	
 
        if repo_store_location:
 
            _paths = [repo_store_location]
 
        else:
 
            _paths = [self.repos_path, new_parent_path, repo_name]
 
        # we need to make it str for mercurial
 
        repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
 

	
 
        # check if this path is not a repository
 
        if is_valid_repo(repo_path, self.repos_path):
 
            raise Exception('This path %s is a valid repository' % repo_path)
 

	
 
        # check if this path is a group
 
        if is_valid_repo_group(repo_path, self.repos_path):
 
            raise Exception('This path %s is a valid group' % repo_path)
 

	
 
        log.info('creating repo %s in %s from url: `%s`',
 
            repo_name, safe_unicode(repo_path),
 
            obfuscate_url_pw(clone_uri))
 

	
 
        backend = get_backend(repo_type)
 

	
 
        if repo_type == 'hg':
 
            baseui = make_ui('db', clear_session=False)
 
            # patch and reset hooks section of UI config to not run any
 
            # hooks on creating remote repo
 
            for k, v in baseui.configitems('hooks'):
 
                baseui.setconfig('hooks', k, None)
 

	
 
            repo = backend(repo_path, create=True, src_url=clone_uri, baseui=baseui)
 
        elif repo_type == 'git':
 
            repo = backend(repo_path, create=True, src_url=clone_uri, bare=True)
 
            # add kallithea hook into this repo
 
            ScmModel().install_git_hooks(repo=repo)
 
        else:
 
            raise Exception('Not supported repo_type %s expected hg/git' % repo_type)
 

	
 
        log.debug('Created repo %s with %s backend',
 
                  safe_unicode(repo_name), safe_unicode(repo_type))
 
        return repo
 

	
 
    def _rename_filesystem_repo(self, old, new):
 
        """
 
        renames repository on filesystem
 

	
 
        :param old: old name
 
        :param new: new name
 
        """
 
        log.info('renaming repo from %s to %s', old, new)
 

	
 
        old_path = os.path.join(self.repos_path, old)
 
        new_path = os.path.join(self.repos_path, new)
 
        if os.path.isdir(new_path):
 
            raise Exception(
 
                'Was trying to rename to already existing dir %s' % new_path
 
            )
 
        shutil.move(old_path, new_path)
 

	
 
    def _delete_filesystem_repo(self, repo):
 
        """
 
        removes repo from filesystem; the removal is actually done by
 
        renaming the dir to an 'rm__*' prefixed name, which Kallithea will skip.
 
        It can be undeleted later by reverting the rename.
 

	
 
        :param repo: repo object
 
        """
 
        rm_path = os.path.join(self.repos_path, repo.repo_name)
 
        log.info("Removing %s", rm_path)
 

	
 
        _now = datetime.now()
 
        _ms = str(_now.microsecond).rjust(6, '0')
 
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
 
                             repo.just_name)
 
        if repo.group:
 
            args = repo.group.full_path_splitted + [_d]
 
            _d = os.path.join(*args)
 
        shutil.move(rm_path, os.path.join(self.repos_path, _d))
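# Illustrative sketch, not part of the original changeset: for a repository
# 'mygroup/myrepo' deleted on 2015-11-07 13:24:44, the resulting path would be
#   mygroup/rm__20151107_132444_<microseconds>__myrepo
# which the repository scanner skips and which can be moved back to undelete.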
kallithea/public/js/base.js
Show inline comments
 
/**
 
Kallithea JS Files
 
**/
 
'use strict';
 

	
 
if (typeof console == "undefined" || typeof console.log == "undefined"){
 
    console = { log: function() {} }
 
}
 

	
 
/**
 
 * INJECT .format function into String
 
 * Usage: "My name is {0} {1}".format("Johny","Bravo")
 
 * Returns "My name is Johny Bravo"
 
 * Inspired by https://gist.github.com/1049426
 
 */
 
String.prototype.format = function() {
 
    function format() {
 
        var str = this;
 
        var len = arguments.length+1;
 
        var safe = undefined;
 
        var arg = undefined;
 

	
 
        // For each {0} {1} {n...} replace with the argument in that position.  If
 
        // the argument is an object or an array it will be stringified to JSON.
 
        for (var i=0; i < len; arg = arguments[i++]) {
 
            safe = typeof arg === 'object' ? JSON.stringify(arg) : arg;
 
            str = str.replace(RegExp('\\{'+(i-1)+'\\}', 'g'), safe);
 
        }
 
        return str;
 
    }
 

	
 
    // Save a reference of what may already exist under the property native.
 
    // Allows for doing something like: if("".format.native) { /* use native */ }
 
    format.native = String.prototype.format;
 

	
 
    // Replace the prototype property
 
    return format;
 

	
 
}();
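// Illustrative usage sketch, not part of the original file: the injected
// .format() substitutes {0}, {1}, ... placeholders and JSON-stringifies
// object/array arguments. Kept as an uncalled function so it has no effect.
var _formatUsageExample = function() {
    var plain = "My name is {0} {1}".format("Johny", "Bravo");   // "My name is Johny Bravo"
    var json = "payload: {0}".format({id: 42});                  // 'payload: {"id":42}'
    return [plain, json];
};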
 

	
 
String.prototype.strip = function(char) {
 
    if(char === undefined){
 
        char = '\\s';
 
    }
 
    return this.replace(new RegExp('^'+char+'+|'+char+'+$','g'), '');
 
}
 

	
 
String.prototype.lstrip = function(char) {
 
    if(char === undefined){
 
        char = '\\s';
 
    }
 
    return this.replace(new RegExp('^'+char+'+'),'');
 
}
 

	
 
String.prototype.rstrip = function(char) {
 
    if(char === undefined){
 
        char = '\\s';
 
    }
 
    return this.replace(new RegExp(''+char+'+$'),'');
 
}
 

	
 
/* https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/indexOf#Polyfill
 
   under MIT license / public domain, see
 
   https://developer.mozilla.org/en-US/docs/MDN/About#Copyrights_and_licenses */
 
if(!Array.prototype.indexOf) {
 
    Array.prototype.indexOf = function (searchElement, fromIndex) {
 
        if ( this === undefined || this === null ) {
 
            throw new TypeError( '"this" is null or not defined' );
 
        }
 

	
 
        var length = this.length >>> 0; // Hack to convert object.length to a UInt32
 

	
 
        fromIndex = +fromIndex || 0;
 

	
 
        if (Math.abs(fromIndex) === Infinity) {
 
            fromIndex = 0;
 
        }
 

	
 
        if (fromIndex < 0) {
 
            fromIndex += length;
 
            if (fromIndex < 0) {
 
                fromIndex = 0;
 
            }
 
        }
 

	
 
        for (;fromIndex < length; fromIndex++) {
 
            if (this[fromIndex] === searchElement) {
 
                return fromIndex;
 
            }
 
        }
 

	
 
        return -1;
 
    };
 
}
 

	
 
/* https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/filter#Compatibility
 
   under MIT license / public domain, see
 
   https://developer.mozilla.org/en-US/docs/MDN/About#Copyrights_and_licenses */
 
if (!Array.prototype.filter)
 
{
 
    Array.prototype.filter = function(fun /*, thisArg */)
 
    {
 
        if (this === void 0 || this === null)
 
            throw new TypeError();
 

	
 
        var t = Object(this);
 
        var len = t.length >>> 0;
 
        if (typeof fun !== "function")
 
            throw new TypeError();
 

	
 
        var res = [];
 
        var thisArg = arguments.length >= 2 ? arguments[1] : void 0;
 
        for (var i = 0; i < len; i++)
 
        {
 
            if (i in t)
 
            {
 
                var val = t[i];
 

	
 
                // NOTE: Technically this should Object.defineProperty at
 
                //       the next index, as push can be affected by
 
                //       properties on Object.prototype and Array.prototype.
 
                //       But that method's new, and collisions should be
 
                //       rare, so use the more-compatible alternative.
 
                if (fun.call(thisArg, val, i, t))
 
                    res.push(val);
 
            }
 
        }
 

	
 
        return res;
 
    };
 
}
 

	
 
/**
 
 * A customized version of PyRoutes.JS from https://pypi.python.org/pypi/pyroutes.js/
 
 * which is copyright Stephane Klein and was made available under the BSD License.
 
 *
 
 * Usage pyroutes.url('mark_error_fixed',{"error_id":error_id}) // /mark_error_fixed/<error_id>
 
 */
 
var pyroutes = (function() {
 
    var matchlist = {};
 
    var sprintf = (function() {
 
        function get_type(variable) {
 
            return Object.prototype.toString.call(variable).slice(8, -1).toLowerCase();
 
        }
 
        function str_repeat(input, multiplier) {
 
            for (var output = []; multiplier > 0; output[--multiplier] = input) {/* do nothing */}
 
            return output.join('');
 
        }
 

	
 
        var str_format = function() {
 
            if (!str_format.cache.hasOwnProperty(arguments[0])) {
 
                str_format.cache[arguments[0]] = str_format.parse(arguments[0]);
 
            }
 
            return str_format.format.call(null, str_format.cache[arguments[0]], arguments);
 
        };
 

	
 
        str_format.format = function(parse_tree, argv) {
 
            var cursor = 1, tree_length = parse_tree.length, node_type = '', arg, output = [], i, k, match, pad, pad_character, pad_length;
 
            for (i = 0; i < tree_length; i++) {
 
                node_type = get_type(parse_tree[i]);
 
                if (node_type === 'string') {
 
                    output.push(parse_tree[i]);
 
                }
 
                else if (node_type === 'array') {
 
                    match = parse_tree[i]; // convenience purposes only
 
                    if (match[2]) { // keyword argument
 
                        arg = argv[cursor];
 
                        for (k = 0; k < match[2].length; k++) {
 
                            if (!arg.hasOwnProperty(match[2][k])) {
 
                                throw(sprintf('[sprintf] property "%s" does not exist', match[2][k]));
 
                            }
 
                            arg = arg[match[2][k]];
 
                        }
 
                    }
 
                    else if (match[1]) { // positional argument (explicit)
 
                        arg = argv[match[1]];
 
                    }
 
                    else { // positional argument (implicit)
 
                        arg = argv[cursor++];
 
                    }
 

	
 
                    if (/[^s]/.test(match[8]) && (get_type(arg) != 'number')) {
 
                        throw(sprintf('[sprintf] expecting number but found %s', get_type(arg)));
 
                    }
 
                    switch (match[8]) {
 
                        case 'b': arg = arg.toString(2); break;
 
                        case 'c': arg = String.fromCharCode(arg); break;
 
                        case 'd': arg = parseInt(arg, 10); break;
 
                        case 'e': arg = match[7] ? arg.toExponential(match[7]) : arg.toExponential(); break;
 
                        case 'f': arg = match[7] ? parseFloat(arg).toFixed(match[7]) : parseFloat(arg); break;
 
                        case 'o': arg = arg.toString(8); break;
 
                        case 's': arg = ((arg = String(arg)) && match[7] ? arg.substring(0, match[7]) : arg); break;
 
                        case 'u': arg = Math.abs(arg); break;
 
                        case 'x': arg = arg.toString(16); break;
 
                        case 'X': arg = arg.toString(16).toUpperCase(); break;
 
                    }
 
                    arg = (/[def]/.test(match[8]) && match[3] && arg >= 0 ? '+'+ arg : arg);
 
                    pad_character = match[4] ? match[4] == '0' ? '0' : match[4].charAt(1) : ' ';
 
                    pad_length = match[6] - String(arg).length;
 
                    pad = match[6] ? str_repeat(pad_character, pad_length) : '';
 
                    output.push(match[5] ? arg + pad : pad + arg);
 
                }
 
            }
 
            return output.join('');
 
        };
 

	
 
        str_format.cache = {};
 

	
 
        str_format.parse = function(fmt) {
 
            var _fmt = fmt, match = [], parse_tree = [], arg_names = 0;
 
            while (_fmt) {
 
                if ((match = /^[^\x25]+/.exec(_fmt)) !== null) {
 
                    parse_tree.push(match[0]);
 
                }
 
                else if ((match = /^\x25{2}/.exec(_fmt)) !== null) {
 
                    parse_tree.push('%');
 
                }
 
                else if ((match = /^\x25(?:([1-9]\d*)\$|\(([^\)]+)\))?(\+)?(0|'[^$])?(-)?(\d+)?(?:\.(\d+))?([b-fosuxX])/.exec(_fmt)) !== null) {
 
                    if (match[2]) {
 
                        arg_names |= 1;
 
                        var field_list = [], replacement_field = match[2], field_match = [];
 
                        if ((field_match = /^([a-z_][a-z_\d]*)/i.exec(replacement_field)) !== null) {
 
                            field_list.push(field_match[1]);
 
                            while ((replacement_field = replacement_field.substring(field_match[0].length)) !== '') {
 
                                if ((field_match = /^\.([a-z_][a-z_\d]*)/i.exec(replacement_field)) !== null) {
 
                                    field_list.push(field_match[1]);
 
                                }
 
                                else if ((field_match = /^\[(\d+)\]/.exec(replacement_field)) !== null) {
 
                                    field_list.push(field_match[1]);
 
                                }
 
                                else {
 
                                    throw('[sprintf] huh?');
 
                                }
 
                            }
 
                        }
 
                        else {
 
                            throw('[sprintf] huh?');
 
                        }
 
                        match[2] = field_list;
 
                    }
 
                    else {
 
                        arg_names |= 2;
 
                    }
 
                    if (arg_names === 3) {
 
                        throw('[sprintf] mixing positional and named placeholders is not (yet) supported');
 
                    }
 
                    parse_tree.push(match);
 
                }
 
                else {
 
                    throw('[sprintf] huh?');
 
                }
 
                _fmt = _fmt.substring(match[0].length);
 
            }
 
            return parse_tree;
 
        };
 

	
 
        return str_format;
 
    })();
 

	
 
    var vsprintf = function(fmt, argv) {
 
        argv.unshift(fmt);
 
        return sprintf.apply(null, argv);
 
    };
 
    return {
 
        'url': function(route_name, params) {
 
            var result = route_name;
 
            if (typeof(params) != 'object'){
 
                params = {};
 
            }
 
            if (matchlist.hasOwnProperty(route_name)) {
 
                var route = matchlist[route_name];
 
                // param substitution
 
                for(var i=0; i < route[1].length; i++) {
 
                   if (!params.hasOwnProperty(route[1][i]))
 
                        throw new Error(route[1][i] + ' missing in "' + route_name + '" route generation');
 
                }
 
                result = sprintf(route[0], params);
 

	
 
                var ret = [];
 
                //extra params => GET
 
                for(var param in params){
 
                    if (route[1].indexOf(param) == -1){
 
                        ret.push(encodeURIComponent(param) + "=" + encodeURIComponent(params[param]));
 
                    }
 
                }
 
                var _parts = ret.join("&");
 
                if(_parts){
 
                    result = result +'?'+ _parts
 
                }
 
            }
 

	
 
            return result;
 
        },
 
        'register': function(route_name, route_tmpl, req_params) {
 
            if (typeof(req_params) != 'object') {
 
                req_params = [];
 
            }
 
            var keys = [];
 
            for (var i=0; i < req_params.length; i++) {
 
                keys.push(req_params[i]);
 
            }
 
            matchlist[route_name] = [
 
                unescape(route_tmpl),
 
                keys
 
            ]
 
        },
 
        '_routes': function(){
 
            return matchlist;
 
        }
 
    }
 
})();
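// Illustrative sketch, not part of the original file: how a route template is
// registered and expanded. The route name and template below are made up for
// the example; it is kept as an uncalled function so the real route table is
// left untouched.
var _pyroutesUsageExample = function() {
    pyroutes.register('_example_route', '/%(repo_name)s/files/%(revision)s',
                      ['repo_name', 'revision']);
    // required params are substituted, extra params become a query string:
    // -> '/myrepo/files/tip?at=default'
    return pyroutes.url('_example_route',
                        {'repo_name': 'myrepo', 'revision': 'tip', 'at': 'default'});
};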
 

	
 

	
 
/**
 
 * GLOBAL YUI Shortcuts
 
 */
 
var YUD = YAHOO.util.Dom;
 
var YUE = YAHOO.util.Event;
 

	
 
/* Invoke all functions in callbacks */
 
var _run_callbacks = function(callbacks){
 
    if (callbacks !== undefined){
 
        var _l = callbacks.length;
 
        for (var i=0;i<_l;i++){
 
            var func = callbacks[i];
 
            if(typeof(func)=='function'){
 
                try{
 
                    func();
 
                }catch (err){};
 
            }
 
        }
 
    }
 
}
 

	
 
/**
 
 * turns objects into GET query string
 
 */
 
var _toQueryString = function(o) {
 
    if(typeof o !== 'object') {
 
        return false;
 
    }
 
    var _p, _qs = [];
 
    for(_p in o) {
 
        _qs.push(encodeURIComponent(_p) + '=' + encodeURIComponent(o[_p]));
 
    }
 
    return _qs.join('&');
 
};
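// Illustrative sketch, not part of the original file: _toQueryString URL-encodes
// keys and values and joins them with '&'. Kept as an uncalled function.
var _toQueryStringExample = function() {
    return _toQueryString({'page': 2, 'q': 'hg repo'});  // -> "page=2&q=hg%20repo"
};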
 

	
 
/**
 
 * Load HTML into DOM using Ajax
 
 *
 
 * @param $target: load html async and place it (or an error message) here
 
 * @param success: success callback function
 
 * @param args: query parameters to pass to url
 
 */
 
function asynchtml(url, $target, success, args){
 
    if(args===undefined){
 
        args=null;
 
    }
 
    $target.html(_TM['Loading ...']).css('opacity','0.3');
 

	
 
    return $.ajax({url: url, data: args, headers: {'X-PARTIAL-XHR': '1'}, cache: false, dataType: 'html'})
 
        .done(function(html) {
 
                $target.html(html);
 
                $target.css('opacity','1.0');
 
                //execute the given original callback
 
                if (success !== undefined && success) {
 
                    success();
 
                }
 
            })
 
        .fail(function(jqXHR, textStatus, errorThrown) {
 
                if (textStatus == "abort")
 
                    return;
 
                $target.html('<span class="error_red">ERROR: {0}</span>'.format(textStatus));
 
                $target.css('opacity','1.0');
 
            })
 
        ;
 
};
 

	
 
var ajaxGET = function(url, success, failure) {
 
    if(failure === undefined) {
 
        failure = function(jqXHR, textStatus, errorThrown) {
 
                if (textStatus != "abort")
 
                    alert("Ajax GET error: " + textStatus);
 
            };
 
    }
 
    return $.ajax({url: url, headers: {'X-PARTIAL-XHR': '1'}, cache: false})
 
        .done(success)
 
        .fail(failure);
 
};
 

	
 
var ajaxPOST = function(url, postData, success, failure) {
 
    postData['_authentication_token'] = _authentication_token;
 
    var postData = _toQueryString(postData);
 
    if(failure === undefined) {
 
        failure = function(jqXHR, textStatus, errorThrown) {
 
                if (textStatus != "abort")
 
                    alert("Error posting to server: " + textStatus);
 
            };
 
    }
 
    return $.ajax({url: url, data: postData, type: 'POST', headers: {'X-PARTIAL-XHR': '1'}, cache: false})
 
        .done(success)
 
        .fail(failure);
 
};
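// Illustrative sketch, not part of the original file: posting a comment-style
// payload. The URL is made up; ajaxPOST adds _authentication_token and
// serializes postData itself. Kept as an uncalled function.
var _ajaxPOSTUsageExample = function() {
    return ajaxPOST('/myrepo/changeset/abc123/comment',
                    {'text': 'Looks good to me'},
                    function(data) { console.log('saved', data); },
                    function(jqXHR, textStatus) { console.log('failed: ' + textStatus); });
};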
 

	
 

	
 
/**
 
 * activate .show_more links
 
 * the .show_more must have an id that is the id of the element to hide, prefixed with _
 
 * the parentnode will be displayed
 
 */
 
var show_more_event = function(){
 
    $('.show_more').click(function(e){
 
        var el = e.currentTarget;
 
        $('#' + el.id.substring(1)).hide();
 
        $(el.parentNode).show();
 
    });
 
};
 

	
 
/**
 
 * activate .lazy-cs mouseover for showing changeset tooltip
 
 */
 
var show_changeset_tooltip = function(){
 
    $('.lazy-cs').mouseover(function(e){
 
        var $target = $(e.currentTarget);
 
        var rid = $target.attr('raw_id');
 
        var repo_name = $target.attr('repo_name');
 
        if(rid && !$target.hasClass('tooltip')){
 
            _show_tooltip(e, _TM['loading ...']);
 
            var url = pyroutes.url('changeset_info', {"repo_name": repo_name, "revision": rid});
 
            ajaxGET(url, function(json){
 
                    $target.addClass('tooltip');
 
                    _show_tooltip(e, json['message']);
 
                    _activate_tooltip($target);
 
                });
 
        }
 
    });
 
};
 

	
 
var _onSuccessFollow = function(target){
 
    var $target = $(target);
 
    var $f_cnt = $('#current_followers_count');
 
    if ($target.hasClass('follow')) {
 
        $target.removeClass('follow').addClass('following');
 
        $target.prop('title', _TM['Stop following this repository']);
 
        if ($f_cnt.html()) {
 
            var cnt = Number($f_cnt.html())+1;
 
            $f_cnt.html(cnt);
 
        }
 
    } else {
 
        $target.removeClass('following').addClass('follow');
 
        $target.prop('title', _TM['Start following this repository']);
 
        if ($f_cnt.html()) {
 
            var cnt = Number($f_cnt.html())-1;
 
            $f_cnt.html(cnt);
 
        }
 
    }
 
}
 

	
 
var toggleFollowingRepo = function(target, follows_repo_id){
 
    var args = 'follows_repo_id=' + follows_repo_id;
 
    args += '&amp;_authentication_token=' + _authentication_token;
 
    $.post(TOGGLE_FOLLOW_URL, args, function(data){
 
            _onSuccessFollow(target);
 
        });
 
    return false;
 
};
 

	
 
var showRepoSize = function(target, repo_name){
 
    var args = '_authentication_token=' + _authentication_token;
 

	
 
    if(!$("#" + target).hasClass('loaded')){
 
        $("#" + target).html(_TM['Loading ...']);
 
        var url = pyroutes.url('repo_size', {"repo_name":repo_name});
 
        $.post(url, args, function(data) {
 
            $("#" + target).html(data);
 
            $("#" + target).addClass('loaded');
 
        });
 
    }
 
    return false;
 
};
 

	
 
/**
 
 * tooltips
 
 */
 

	
 
var tooltip_activate = function(){
 
    $(document).ready(_init_tooltip);
 
};
 

	
 
var _activate_tooltip = function($tt){
 
    $tt.mouseover(_show_tooltip);
 
    $tt.mousemove(_move_tooltip);
 
    $tt.mouseout(_close_tooltip);
 
};
 

	
 
var _init_tooltip = function(){
 
    var $tipBox = $('#tip-box');
 
    if(!$tipBox.length){
 
        $tipBox = $('<div id="tip-box"></div>');
 
        $(document.body).append($tipBox);
 
    }
 

	
 
    $tipBox.hide();
 
    $tipBox.css('position', 'absolute');
 
    $tipBox.css('max-width', '600px');
 

	
 
    _activate_tooltip($('.tooltip'));
 
};
 

	
 
var _show_tooltip = function(e, tipText, safe){
 
    e.stopImmediatePropagation();
 
    var el = e.currentTarget;
 
    var $el = $(el);
 
    if(tipText){
 
        // just use it
 
    } else if(el.tagName.toLowerCase() === 'img'){
 
        tipText = el.alt ? el.alt : '';
 
    } else {
 
        tipText = el.title ? el.title : '';
 
        safe = safe || $el.hasClass("safe-html-title");
 
    }
 

	
 
    if(tipText !== ''){
 
        // save org title
 
        $el.attr('tt_title', tipText);
 
        // reset title to not show org tooltips
 
        $el.prop('title', '');
 

	
 
        var $tipBox = $('#tip-box');
 
        if (safe) {
 
            $tipBox.html(tipText);
 
        } else {
 
            $tipBox.text(tipText);
 
        }
 
        $tipBox.css('display', 'block');
 
    }
 
};
 

	
 
var _move_tooltip = function(e){
 
    e.stopImmediatePropagation();
 
    var $tipBox = $('#tip-box');
 
    $tipBox.css('top', (e.pageY + 15) + 'px');
 
    $tipBox.css('left', (e.pageX + 15) + 'px');
 
};
 

	
 
var _close_tooltip = function(e){
 
    e.stopImmediatePropagation();
 
    var $tipBox = $('#tip-box');
 
    $tipBox.hide();
 
    var el = e.currentTarget;
 
    $(el).prop('title', $(el).attr('tt_title'));
 
};
 

	
 
/**
 
 * Quick filter widget
 
 *
 
 * @param target: filter input target
 
 * @param $nodes: list of nodes in html we want to filter
 
 * @param display_element: function that takes the current node from $nodes and
 
 *    hides or shows the corresponding element based on the node
 
 */
 
var q_filter = (function() {
 
    var _namespace = {};
 
    var namespace = function (target) {
 
        if (!(target in _namespace)) {
 
            _namespace[target] = {};
 
        }
 
        return _namespace[target];
 
    };
 
    return function (target, $nodes, display_element) {
 
        var $nodes = $nodes;
 
        var $q_filter_field = $('#' + target);
 
        var F = namespace(target);
 

	
 
        $q_filter_field.keyup(function (e) {
 
            clearTimeout(F.filterTimeout);
 
            F.filterTimeout = setTimeout(F.updateFilter, 600);
 
        });
 

	
 
        F.filterTimeout = null;
 

	
 
        F.updateFilter = function () {
 
            // Reset timeout
 
            F.filterTimeout = null;
 

	
 
            var obsolete = [];
 

	
 
            var req = $q_filter_field.val().toLowerCase();
 

	
 
            var showing = 0;
 
            $nodes.each(function () {
 
                var n = this;
 
                var target_element = display_element(n);
 
                if (req && n.innerHTML.toLowerCase().indexOf(req) == -1) {
 
                    $(target_element).hide();
 
                }
 
                else {
 
                    $(target_element).show();
 
                    showing += 1;
 
                }
 
            });
 

	
 
            $('#repo_count').html(showing);
 
            /* FIXME: don't hardcode */
 
        }
 
    }
 
})();
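// Illustrative sketch, not part of the original file: wiring the quick filter
// to an input with id 'q_filter' and a list of '.repo_name' cells (both names
// assumed for the example), hiding or showing the enclosing table row. Kept as
// an uncalled function.
var _qFilterUsageExample = function() {
    q_filter('q_filter', $('.repo_name'), function(node) {
        return $(node).closest('tr')[0];
    });
};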
 

	
 

	
 
/**
 
 * Comment handling
 
 */
 

	
 
// move comments to their right location, inside new trs
 
function move_comments($anchorcomments) {
 
    $anchorcomments.each(function(i, anchorcomment) {
 
        var $anchorcomment = $(anchorcomment);
 
        var target_id = $anchorcomment.data('target-id');
 
        var $comment_div = _get_add_comment_div(target_id);
 
        var f_path = $anchorcomment.data('f_path');
 
        var line_no = $anchorcomment.data('line_no');
 
        if ($comment_div[0]) {
 
            $comment_div.append($anchorcomment.children());
 
            _comment_div_append_add($comment_div, f_path, line_no);
 
        } else {
 
           $anchorcomment.before("Comment to {0} line {1} which is outside the diff context:".format(f_path || '?', line_no || '?'));
 
        }
 
    });
 
    linkInlineComments($('.firstlink'), $('.comment:first-child'));
 
}
 

	
 
// comment bubble was clicked - insert new tr and show form
 
function show_comment_form($bubble) {
 
    var children = $bubble.closest('tr.line').children('[id]');
 
    var line_td_id = children[children.length - 1].id;
 
    var $comment_div = _get_add_comment_div(line_td_id);
 
    var f_path = $bubble.closest('div.full_f_path').data('f_path');
 
    var parts = line_td_id.split('_');
 
    var line_no = parts[parts.length-1];
 
    comment_div_state($comment_div, f_path, line_no, true);
 
}
 

	
 
// return comment div for target_id - add it if it doesn't exist yet
 
function _get_add_comment_div(target_id) {
 
    var comments_box_id = 'comments-' + target_id;
 
    var $comments_box = $('#' + comments_box_id);
 
    if (!$comments_box.length) {
 
        var html = '<tr><td id="{0}" colspan="3" class="inline-comments"></td></tr>'.format(comments_box_id);
 
        $('#' + target_id).closest('tr').after(html);
 
        $comments_box = $('#' + comments_box_id);
 
    }
 
    return $comments_box;
 
}
 

	
 
// set $comment_div state - showing or not showing form and Add button
 
function comment_div_state($comment_div, f_path, line_no, show_form) {
 
    var $forms = $comment_div.children('.comment-inline-form');
 
    var $buttonrow = $comment_div.children('.add-button-row');
 
    var $comments = $comment_div.children('.comment');
 
    if (show_form) {
 
        if (!$forms.length) {
 
            _comment_div_append_form($comment_div, f_path, line_no);
 
        }
 
    } else {
 
        $forms.remove();
 
    }
 
    $buttonrow.remove();
 
    if ($comments.length && !show_form) {
 
        _comment_div_append_add($comment_div, f_path, line_no);
 
    }
 
}
 

	
 
// append an Add button to $comment_div and hook it up to show form
 
function _comment_div_append_add($comment_div, f_path, line_no) {
 
    var addlabel = TRANSLATION_MAP['Add Another Comment'];
 
    var $add = $('<div class="add-button-row"><span class="btn btn-mini add-button">{0}</span></div>'.format(addlabel));
 
    $comment_div.append($add);
 
    $add.children('.add-button').click(function(e) {
 
        comment_div_state($comment_div, f_path, line_no, true);
 
    });
 
}
 

	
 
// append a comment form to $comment_div
 
function _comment_div_append_form($comment_div, f_path, line_no) {
 
    var $form_div = $($('#comment-inline-form-template').html().format(f_path, line_no))
 
        .addClass('comment-inline-form');
 
    $comment_div.append($form_div);
 
    var $form = $comment_div.find("form");
 

	
 
    $form.submit(function(e) {
 
        e.preventDefault();
 

	
 
        var text = $('#text_'+line_no).val();
 
        if (!text){
 
            return;
 
        }
 

	
 
        $form.find('.submitting-overlay').show();
 

	
 
        var success = function(json_data) {
 
            $comment_div.append(json_data['rendered_text']);
 
            comment_div_state($comment_div, f_path, line_no, false);
 
            linkInlineComments($('.firstlink'), $('.comment:first-child'));
 
        };
 
        var postData = {
 
            'text': text,
 
            'f_path': f_path,
 
            'line': line_no
 
        };
 
        ajaxPOST(AJAX_COMMENT_URL, postData, success);
 
    });
 

	
 
    $('#preview-btn_'+line_no).click(function(e){
 
        var text = $('#text_'+line_no).val();
 
        if(!text){
 
            return
 
        }
 
        $('#preview-box_'+line_no).addClass('unloaded');
 
        $('#preview-box_'+line_no).html(_TM['Loading ...']);
 
        $('#edit-container_'+line_no).hide();
 
        $('#edit-btn_'+line_no).show();
 
        $('#preview-container_'+line_no).show();
 
        $('#preview-btn_'+line_no).hide();
 

	
 
        var url = pyroutes.url('changeset_comment_preview', {'repo_name': REPO_NAME});
 
        var post_data = {'text': text};
 
        ajaxPOST(url, post_data, function(html) {
 
            $('#preview-box_'+line_no).html(html);
 
            $('#preview-box_'+line_no).removeClass('unloaded');
 
        })
 
    })
 
    $('#edit-btn_'+line_no).click(function(e) {
 
        $('#edit-container_'+line_no).show();
 
        $('#edit-btn_'+line_no).hide();
 
        $('#preview-container_'+line_no).hide();
 
        $('#preview-btn_'+line_no).show();
 
    })
 

	
 
    // create event for hide button
 
    $form.find('.hide-inline-form').click(function(e) {
 
        comment_div_state($comment_div, f_path, line_no, false);
 
    });
 

	
 
    setTimeout(function() {
 
        // callbacks
 
        tooltip_activate();
 
        MentionsAutoComplete($('#text_'+line_no), $('#mentions_container_'+line_no),
 
                             _USERS_AC_DATA);
 
        $('#text_'+line_no).focus();
 
    }, 10);
 
}
 

	
 

	
 
function deleteComment(comment_id) {
 
    var url = AJAX_COMMENT_DELETE_URL.replace('__COMMENT_ID__', comment_id);
 
    var postData = {'_method': 'delete'};
 
    var success = function(o) {
 
        $('#comment-'+comment_id).remove();
 
        // Ignore that this might leave a stray Add button (or have a pending form with another comment) ...
 
    }
 
    ajaxPOST(url, postData, success);
 
}
 

	
 

	
 
/**
 
 * Double link comments
 
 */
 
var linkInlineComments = function($firstlinks, $comments){
 
    if ($comments.length > 0) {
 
        $firstlinks.html('<a href="#{0}">First comment</a>'.format($comments.prop('id')));
 
    }
 
    if ($comments.length <= 1) {
 
        return;
 
    }
 

	
 
    $comments.each(function(i, e){
 
            var prev = '';
 
            if (i > 0){
 
                var prev_anchor = $($comments.get(i-1)).prop('id');
 
                prev = '<a href="#{0}">Previous comment</a>'.format(prev_anchor);
 
            }
 
            var next = '';
 
            if (i+1 < $comments.length){
 
                var next_anchor = $($comments.get(i+1)).prop('id');
 
                next = '<a href="#{0}">Next comment</a>'.format(next_anchor);
 
            }
 
            $(this).find('.comment-prev-next-links').html(
 
                '<div class="prev-comment">{0}</div>'.format(prev) +
 
                '<div class="next-comment">{0}</div>'.format(next));
 
        });
 
}
 

	
 
/* activate files.html stuff */
 
var fileBrowserListeners = function(current_url, node_list_url, url_base){
 
    var current_url_branch = "?branch=__BRANCH__";
 

	
 
    $('#stay_at_branch').on('click',function(e){
 
        if(e.currentTarget.checked){
 
            var uri = current_url_branch;
 
            uri = uri.replace('__BRANCH__',e.currentTarget.value);
 
            window.location = uri;
 
        }
 
        else{
 
            window.location = current_url;
 
        }
 
    });
 

	
 
    var $node_filter = $('#node_filter');
 

	
 
    var filterTimeout = null;
 
    var nodes = null;
 

	
 
    var initFilter = function(){
 
        $('#node_filter_box_loading').show();
 
        $('#search_activate_id').hide();
 
        $('#add_node_id').hide();
 
        $.ajax({url: node_list_url, headers: {'X-PARTIAL-XHR': '1'}, cache: false})
 
            .done(function(json) {
 
                    nodes = json.nodes;
 
                    $('#node_filter_box_loading').hide();
 
                    $('#node_filter_box').show();
 
                    $node_filter.focus();
 
                    if($node_filter.hasClass('init')){
 
                        $node_filter.val('');
 
                        $node_filter.removeClass('init');
 
                    }
 
                })
 
            .fail(function() {
 
                    console.log('fileBrowserListeners initFilter failed to load');
 
                })
 
        ;
 
    }
 

	
 
    var updateFilter = function(e) {
 
        return function(){
 
            // Reset timeout
 
            filterTimeout = null;
 
            var query = e.currentTarget.value.toLowerCase();
 
            var match = [];
 
            var matches = 0;
 
            var matches_max = 20;
 
            if (query != ""){
 
                for(var i=0;i<nodes.length;i++){
 
                    var pos = nodes[i].name.toLowerCase().indexOf(query);
 
                    if(query && pos != -1){
 
                        matches++
 
                        // show only a limited number of matches to not overload the browser
 
                        if (matches > matches_max){
 
                            break;
 
                        }
 

	
 
                        var n = nodes[i].name;
 
                        var t = nodes[i].type;
 
                        var n_hl = n.substring(0,pos)
 
                            + "<b>{0}</b>".format(n.substring(pos,pos+query.length))
 
                            + n.substring(pos+query.length);
 
                        var new_url = url_base.replace('__FPATH__',n);
 
                        match.push('<tr><td><a class="browser-{0}" href="{1}">{2}</a></td><td colspan="5"></td></tr>'.format(t,new_url,n_hl));
 
                    }
 
                    if(match.length >= matches_max){
 
                        match.push('<tr><td>{0}</td><td colspan="5"></td></tr>'.format(_TM['Search truncated']));
 
                        break;
 
                    }
 
                }
 
            }
 
            if(query != ""){
 
                $('#tbody').hide();
 
                $('#tbody_filtered').show();
 

	
 
                if (match.length==0){
 
                  match.push('<tr><td>{0}</td><td colspan="5"></td></tr>'.format(_TM['No matching files']));
 
                }
 

	
 
                $('#tbody_filtered').html(match.join(""));
 
            }
 
            else{
 
                $('#tbody').show();
 
                $('#tbody_filtered').hide();
 
            }
 
        }
 
    };
 

	
 
    $('#filter_activate').click(function(){
 
            initFilter();
 
        });
 
    $node_filter.click(function(){
 
            if($node_filter.hasClass('init')){
 
                $node_filter.val('');
 
                $node_filter.removeClass('init');
 
            }
 
        });
 
    $node_filter.keyup(function(e){
 
            clearTimeout(filterTimeout);
 
            filterTimeout = setTimeout(updateFilter(e),600);
 
        });
 
};
 

	
 

	
 
var initCodeMirror = function(textarea_id, baseUrl, resetUrl){
 
    var myCodeMirror = CodeMirror.fromTextArea($('#' + textarea_id)[0], {
 
            mode: "null",
 
            lineNumbers: true,
 
            indentUnit: 4,
 
            autofocus: true
 
        });
 
    CodeMirror.modeURL = baseUrl + "/codemirror/mode/%N/%N.js";
 

	
 
    $('#reset').click(function(e){
 
            window.location=resetUrl;
 
        });
 

	
 
    $('#file_enable').click(function(){
 
            $('#editor_container').show();
 
            $('#upload_file_container').hide();
 
            $('#filename_container').show();
 
            $('#set_mode_header').show();
 
        });
 

	
 
    $('#upload_file_enable').click(function(){
 
            $('#editor_container').hide();
 
            $('#upload_file_container').show();
 
            $('#filename_container').hide();
 
            $('#set_mode_header').hide();
 
        });
 

	
 
    return myCodeMirror
 
};
 

	
 
var setCodeMirrorMode = function(codeMirrorInstance, mode) {
 
    CodeMirror.autoLoadMode(codeMirrorInstance, mode);
 
}
 

	
 

	
 
var _getIdentNode = function(n){
 
    // iterate through parent nodes until a matching interesting node is found
 

	
 
    if (typeof n == 'undefined'){
 
        return -1
 
    }
 

	
 
    if(typeof n.id != "undefined" && n.id.match('L[0-9]+')){
 
        return n
 
    }
 
    else{
 
        return _getIdentNode(n.parentNode);
 
    }
 
};
 

	
 
/* generate links for multi-line selections that can be shown by files.html page_highlights.
 
 * This is a mouseup handler for hlcode from CodeHtmlFormatter and pygmentize */
 
var getSelectionLink = function(e) {
 
    //get selection from start/to nodes
 
    if (typeof window.getSelection != "undefined") {
 
        var s = window.getSelection();
 

	
 
        var from = _getIdentNode(s.anchorNode);
 
        var till = _getIdentNode(s.focusNode);
 

	
 
        var f_int = parseInt(from.id.replace('L',''));
 
        var t_int = parseInt(till.id.replace('L',''));
 

	
 
        var yoffset = 35;
 
        var ranges = [parseInt(from.id.replace('L','')), parseInt(till.id.replace('L',''))];
 
        if (ranges[0] > ranges[1]){
 
            //highlight from bottom
 
            yoffset = -yoffset;
 
            ranges = [ranges[1], ranges[0]];
 
        }
 
        var $hl_div = $('div#linktt');
 
        // if we select more than 2 lines
 
        if (ranges[0] != ranges[1]){
 
            if ($hl_div.length) {
 
                $hl_div.html('');
 
            } else {
 
                $hl_div = $('<div id="linktt" class="hl-tip-box">');
 
                $('body').prepend($hl_div);
 
            }
 

	
 
            $hl_div.append($('<a>').html(_TM['Selection link']).prop('href', location.href.substring(0, location.href.indexOf('#')) + '#L' + ranges[0] + '-'+ranges[1]));
 
            var xy = $(till).offset();
 
            $hl_div.css('top', (xy.top + yoffset) + 'px').css('left', xy.left + 'px');
 
            $hl_div.show();
 
        }
 
        else{
 
            $hl_div.hide();
 
        }
 
    }
 
};
 

	
 
var deleteNotification = function(url, notification_id, callbacks){
 
    var success = function(o){
 
            $("#notification_"+notification_id).remove();
 
            _run_callbacks(callbacks);
 
        };
 
    var failure = function(o){
 
            alert("deleteNotification failure");
 
        };
 
    var postData = {'_method': 'delete'};
 
    var sUrl = url.replace('__NOTIFICATION_ID__',notification_id);
 
    ajaxPOST(sUrl, postData, success, failure);
 
};
 

	
 
var readNotification = function(url, notification_id, callbacks){
 
    var success = function(o){
 
            var $obj = $("#notification_"+notification_id);
 
            $obj.removeClass('unread');
 
            $obj.find('.read-notification').remove();
 
            _run_callbacks(callbacks);
 
        };
 
    var failure = function(o){
 
            alert("readNotification failure");
 
        };
 
    var postData = {'_method': 'put'};
 
    var sUrl = url.replace('__NOTIFICATION_ID__',notification_id);
 
    ajaxPOST(sUrl, postData, success, failure);
 
};
 

	
 
/**
 
 * Autocomplete functionality
 
 */
 

	
 
// Custom search function for the DataSource of users
 
var autocompleteMatchUsers = function (sQuery, myUsers) {
 
    // Case insensitive matching
 
    var query = sQuery.toLowerCase();
 
    var i = 0;
 
    var l = myUsers.length;
 
    var matches = [];
 

	
 
    // Match against each name of each contact
 
    for (; i < l; i++) {
 
        var contact = myUsers[i];
 
        if (((contact.fname+"").toLowerCase().indexOf(query) > -1) ||
 
             ((contact.lname+"").toLowerCase().indexOf(query) > -1) ||
 
             ((contact.nname) && ((contact.nname).toLowerCase().indexOf(query) > -1))) {
 
            matches[matches.length] = contact;
 
        }
 
    }
 
    return matches;
 
};
 

	
 
// Custom search function for the DataSource of userGroups
 
var autocompleteMatchGroups = function (sQuery, myGroups) {
 
    // Case insensitive matching
 
    var query = sQuery.toLowerCase();
 
    var i = 0;
 
    var l = myGroups.length;
 
    var matches = [];
 

	
 
    // Match against each name of each group
 
    for (; i < l; i++) {
 
        var matched_group = myGroups[i];
 
        if (matched_group.grname.toLowerCase().indexOf(query) > -1) {
 
            matches[matches.length] = matched_group;
 
        }
 
    }
 
    return matches;
 
};
 

	
 
// Helper highlight function for the formatter
 
var autocompleteHighlightMatch = function (full, snippet, matchindex) {
 
    return full.substring(0, matchindex)
 
        + "<span class='match'>"
 
        + full.substr(matchindex, snippet.length)
 
        + "</span>" + full.substring(matchindex + snippet.length);
 
};
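// Worked example: autocompleteHighlightMatch('John', 'jo', 0) returns
// "<span class='match'>Jo</span>hn" - the matched part of the name is
// wrapped in a span so the formatter can style it.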
 

	
 
var gravatar = function(link, size, cssclass) {
 
    var elem = '<img alt="" class="{2}" style="width: {0}px; height: {0}px" src="{1}"/>'.format(size, link, cssclass);
 
    if (!link) {
 
        elem = '<i class="icon-user {1}" style="font-size: {0}px;"></i>'.format(size, cssclass);
 
    }
 
    return elem;
 
}
 

	
 
var autocompleteGravatar = function(res, link, size, group) {
 
    var elem = gravatar(link, size, "perm-gravatar-ac");
 
    if (group !== undefined) {
 
        elem = '<i class="perm-gravatar-ac icon-users"></i>';
 
    }
 
    return '<div class="ac-container-wrap">{0}{1}</div>'.format(elem, res);
 
}
 

	
 
// Custom formatter to highlight the matching letters
 
var autocompleteFormatter = function (oResultData, sQuery, sResultMatch) {
 
    var query = sQuery.toLowerCase();
 

	
 
    // group
 
    if (oResultData.grname != undefined) {
 
        var grname = oResultData.grname;
 
        var grmembers = oResultData.grmembers;
 
        var grnameMatchIndex = grname.toLowerCase().indexOf(query);
 
        var grprefix = "{0}: ".format(_TM['Group']);
 
        var grsuffix = " ({0} {1})".format(grmembers, _TM['members']);
 

	
 
        if (grnameMatchIndex > -1) {
 
            return autocompleteGravatar(grprefix + autocompleteHighlightMatch(grname, query, grnameMatchIndex) + grsuffix, null, null, true);
 
        }
 
        return autocompleteGravatar(grprefix + oResultData.grname + grsuffix, null, null, true);
 

	
 
    // users
 
    } else if (oResultData.nname != undefined) {
 
        var fname = oResultData.fname || "";
 
        var lname = oResultData.lname || "";
 
        var nname = oResultData.nname;
 

	
 
        // Guard against null value
 
        var fnameMatchIndex = fname.toLowerCase().indexOf(query),
 
            lnameMatchIndex = lname.toLowerCase().indexOf(query),
 
            nnameMatchIndex = nname.toLowerCase().indexOf(query),
 
            displayfname, displaylname, displaynname, displayname;
 

	
 
        if (fnameMatchIndex > -1) {
 
            displayfname = autocompleteHighlightMatch(fname, query, fnameMatchIndex);
 
        } else {
 
            displayfname = fname;
 
        }
 

	
 
        if (lnameMatchIndex > -1) {
 
            displaylname = autocompleteHighlightMatch(lname, query, lnameMatchIndex);
 
        } else {
 
            displaylname = lname;
 
        }
 

	
 
        if (nnameMatchIndex > -1) {
 
            displaynname = autocompleteHighlightMatch(nname, query, nnameMatchIndex);
 
        } else {
 
            displaynname = nname;
 
        }
 

	
 
        displayname = displaynname;
 
        if (displayfname && displaylname) {
 
            displayname = "{0} {1} ({2})".format(displayfname, displaylname, displayname);
 
        }
 

	
 
        return autocompleteGravatar(displayname, oResultData.gravatar_lnk, oResultData.gravatar_size);
 
    } else {
 
        return '';
 
    }
 
};
 

	
 
// Generate a basic autocomplete instance that can be tweaked further by the caller
 
var autocompleteCreate = function ($inputElement, $container, matchFunc) {
 
    var datasource = new YAHOO.util.FunctionDataSource(matchFunc);
 

	
 
    var autocomplete = new YAHOO.widget.AutoComplete($inputElement[0], $container[0], datasource);
 
    autocomplete.useShadow = false;
 
    autocomplete.resultTypeList = false;
 
    autocomplete.animVert = false;
 
    autocomplete.animHoriz = false;
 
    autocomplete.animSpeed = 0.1;
 
    autocomplete.formatResult = autocompleteFormatter;
 

	
 
    return autocomplete;
 
}
 

	
 
var SimpleUserAutoComplete = function ($inputElement, $container, users_list) {
 

	
 
    var matchUsers = function (sQuery) {
 
        return autocompleteMatchUsers(sQuery, users_list);
 
    }
 

	
 
    var userAC = autocompleteCreate($inputElement, $container, matchUsers);
 

	
 
    // Handler for selection of an entry
 
    var itemSelectHandler = function (sType, aArgs) {
 
        var myAC = aArgs[0]; // reference back to the AC instance
 
        var elLI = aArgs[1]; // reference to the selected LI element
 
        var oData = aArgs[2]; // object literal of selected item's result data
 
        myAC.getInputEl().value = oData.nname;
 
    };
 
    userAC.itemSelectEvent.subscribe(itemSelectHandler);
 
}
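// Hypothetical wiring (element ids and user data are illustrative, not from
// this file):
// SimpleUserAutoComplete($('#user'), $('#user_container'),
//     [{id: 1, fname: 'John', lname: 'Doe', nname: 'jdoe',
//       gravatar_lnk: '', gravatar_size: 20}]);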
 

	
 
var MembersAutoComplete = function ($inputElement, $container, users_list, groups_list) {
 

	
 
    var matchAll = function (sQuery) {
 
        var u = autocompleteMatchUsers(sQuery, users_list);
 
        var g = autocompleteMatchGroups(sQuery, groups_list);
 
        return u.concat(g);
 
    };
 

	
 
    var membersAC = autocompleteCreate($inputElement, $container, matchAll);
 

	
 
    // Handler for selection of an entry
 
    var itemSelectHandler = function (sType, aArgs) {
 
        var nextId = $inputElement.prop('id').split('perm_new_member_name_')[1];
 
        var myAC = aArgs[0]; // reference back to the AC instance
 
        var elLI = aArgs[1]; // reference to the selected LI element
 
        var oData = aArgs[2]; // object literal of selected item's result data
 
        //fill the autocomplete with value
 
        if (oData.nname != undefined) {
 
            //users
 
            myAC.getInputEl().value = oData.nname;
 
            $('#perm_new_member_type_'+nextId).val('user');
 
        } else {
 
            //groups
 
            myAC.getInputEl().value = oData.grname;
 
            $('#perm_new_member_type_'+nextId).val('users_group');
 
        }
 
    };
 
    membersAC.itemSelectEvent.subscribe(itemSelectHandler);
 
}
 

	
 
var MentionsAutoComplete = function ($inputElement, $container, users_list) {
 

	
 
    var matchUsers = function (sQuery) {
 
            var org_sQuery = sQuery;
 
            if(this.mentionQuery == null){
 
                return []
 
            }
 
            sQuery = this.mentionQuery;
 
            return autocompleteMatchUsers(sQuery, users_list);
 
    }
 

	
 
    var mentionsAC = autocompleteCreate($inputElement, $container, matchUsers);
 
    mentionsAC.suppressInputUpdate = true;
 
    // Overwrite formatResult to take into account mentionQuery
 
    mentionsAC.formatResult = function (oResultData, sQuery, sResultMatch) {
 
        var org_sQuery = sQuery;
 
        if (this.dataSource.mentionQuery != null) {
 
            sQuery = this.dataSource.mentionQuery;
 
        }
 
        return autocompleteFormatter(oResultData, sQuery, sResultMatch);
 
    }
 

	
 
    // Handler for selection of an entry
 
    if(mentionsAC.itemSelectEvent){
 
        mentionsAC.itemSelectEvent.subscribe(function (sType, aArgs) {
 
            var myAC = aArgs[0]; // reference back to the AC instance
 
            var elLI = aArgs[1]; // reference to the selected LI element
 
            var oData = aArgs[2]; // object literal of selected item's result data
 
            // Replace the mention query with the selected user name
 
            var re = new RegExp();
 
            var org = myAC.getInputEl().value;
 
            var chunks = myAC.dataSource.chunks;
 
            // replace the middle chunk (the search term) with the actual match
 
            chunks[1] = chunks[1].replace('@'+myAC.dataSource.mentionQuery,
 
                                          '@'+oData.nname+' ');
 
            myAC.getInputEl().value = chunks.join('');
 
            myAC.getInputEl().focus(); // Y U NO WORK !?
 
        });
 
    }
 

	
 
    // in this key buffer we gather the current value of the search;
 
    // we only need it when someone types `@`, which is when we run
 
    // the search
 
    mentionsAC.dataSource.chunks = [];
 
    mentionsAC.dataSource.mentionQuery = null;
 

	
 
    mentionsAC.get_mention = function(msg, max_pos) {
 
        var org = msg;
 
        // Must match utils2.py MENTIONS_REGEX.
 
        // Only matching on the string up to the cursor, so it must end with $
 
        var re = new RegExp('(?:^|[^a-zA-Z0-9])@([a-zA-Z0-9][-_.a-zA-Z0-9]*[a-zA-Z0-9])$');
 
        var chunks  = [];
 

	
 
        // cut first chunk until current pos
 
        var to_max = msg.substr(0, max_pos);
 
        var at_pos = Math.max(0,to_max.lastIndexOf('@')-1);
 
        var msg2 = to_max.substr(at_pos);
 

	
 
        chunks.push(org.substr(0,at_pos)); // prefix chunk
 
        chunks.push(msg2);                 // search chunk
 
        chunks.push(org.substr(max_pos));  // postfix chunk
 

	
 
        // clean up msg2 for filtering and regex match
 
        var msg2 = msg2.lstrip(' ').lstrip('\n');
 

	
 
        if(re.test(msg2)){
 
            var unam = re.exec(msg2)[1];
 
            return [unam, chunks];
 
        }
 
        return [null, null];
 
    };
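    // Worked example (assuming the String lstrip helper defined earlier in
    // this file): get_mention('hello @ad', 9) splits the message into the
    // chunks ['hello', ' @ad', ''] and extracts the mention query 'ad',
    // so it returns ['ad', ['hello', ' @ad', '']].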
 

	
 
    $inputElement.keyup(function(e){
 
            var currentMessage = $inputElement.val();
 
            var currentCaretPosition = $inputElement[0].selectionStart;
 

	
 
            var unam = mentionsAC.get_mention(currentMessage, currentCaretPosition);
 
            var curr_search = null;
 
            if(unam[0]){
 
                curr_search = unam[0];
 
            }
 

	
 
            mentionsAC.dataSource.chunks = unam[1];
 
            mentionsAC.dataSource.mentionQuery = curr_search;
 
        });
 
}
 

	
 
var addReviewMember = function(id,fname,lname,nname,gravatar_link,gravatar_size){
 
    var displayname = nname;
 
    if ((fname != "") && (lname != "")) {
 
        displayname = "{0} {1} ({2})".format(fname, lname, nname);
 
    }
 
    var gravatarelm = gravatar(gravatar_link, gravatar_size, "");
 
    // WARNING: the HTML below is duplicated in
 
    // kallithea/templates/pullrequests/pullrequest_show.html
 
    // If you change something here it should be reflected in the template too.
 
    var element = (
 
        '     <li id="reviewer_{2}">\n'+
 
        '       <div class="reviewers_member">\n'+
 
        '           <div class="reviewer_status tooltip" title="not_reviewed">\n'+
 
        '             <i class="icon-circle changeset-status-not_reviewed"></i>\n'+
 
        '           </div>\n'+
 
        '         <div class="reviewer_gravatar gravatar">{0}</div>\n'+
 
        '         <div style="float:left;">{1}</div>\n'+
 
        '         <input type="hidden" value="{2}" name="review_members" />\n'+
 
        '         <div class="reviewer_member_remove action_button" onclick="removeReviewMember({2})">\n'+
 
        '             <i class="icon-minus-circled"></i>\n'+
 
        '         </div> (add not saved)\n'+
 
        '       </div>\n'+
 
        '     </li>\n'
 
        ).format(gravatarelm, displayname, id);
 
    // check that this ID is not already in the list
 
    var ids = [];
 
    $('#review_members').find('li').each(function() {
 
            ids.push(this.id);
 
        });
 
    if(ids.indexOf('reviewer_'+id) == -1){
 
        //only add if it's not there
 
        $('#review_members').append(element);
 
    }
 
}
 

	
 
var removeReviewMember = function(reviewer_id, repo_name, pull_request_id){
 
    var $li = $('#reviewer_{0}'.format(reviewer_id));
 
    $li.find('div div').css("text-decoration", "line-through");
 
    $li.find('input').prop('name', 'review_members_removed');
 
    $li.find('.reviewer_member_remove').replaceWith('&nbsp;(remove not saved)');
 
}
 

	
 
/* activate auto completion of users as PR reviewers */
 
var PullRequestAutoComplete = function ($inputElement, $container, users_list) {
 

	
 
    var matchUsers = function (sQuery) {
 
        return autocompleteMatchUsers(sQuery, users_list);
 
    };
 

	
 
    var reviewerAC = autocompleteCreate($inputElement, $container, matchUsers);
 
    reviewerAC.suppressInputUpdate = true;
 

	
 
    // Handler for selection of an entry
 
    if(reviewerAC.itemSelectEvent){
 
        reviewerAC.itemSelectEvent.subscribe(function (sType, aArgs) {
 
            var myAC = aArgs[0]; // reference back to the AC instance
 
            var elLI = aArgs[1]; // reference to the selected LI element
 
            var oData = aArgs[2]; // object literal of selected item's result data
 

	
 
            addReviewMember(oData.id, oData.fname, oData.lname, oData.nname,
 
                            oData.gravatar_lnk, oData.gravatar_size);
 
            myAC.getInputEl().value = '';
 
        });
 
    }
 
}
 

	
 
/**
 
 * Activate .quick_repo_menu
 
 */
 
var quick_repo_menu = function(){
 
    $(".quick_repo_menu").mouseenter(function(e) {
 
            var $menu = $(e.currentTarget).children().first().children().first();
 
            if($menu.hasClass('hidden')){
 
                $menu.removeClass('hidden').addClass('active');
 
                $(e.currentTarget).removeClass('hidden').addClass('active');
 
            }
 
        });
 
    $(".quick_repo_menu").mouseleave(function(e) {
 
            var $menu = $(e.currentTarget).children().first().children().first();
 
            if($menu.hasClass('active')){
 
                $menu.removeClass('active').addClass('hidden');
 
                $(e.currentTarget).removeClass('active').addClass('hidden');
 
            }
 
        });
 
};
 

	
 

	
 
/**
 
 * TABLE SORTING
 
 */
 

	
 
var revisionSort = function(a, b, desc, field) {
 
    var a_ = parseInt(a.getData('last_rev_raw') || 0);
 
    var b_ = parseInt(b.getData('last_rev_raw') || 0);
 

	
 
    return YAHOO.util.Sort.compare(a_, b_, desc);
 
};
 

	
 
var ageSort = function(a, b, desc, field) {
 
    // data is like: <span class="tooltip" date="2014-06-04 18:18:55.325474" title="Wed, 04 Jun 2014 18:18:55">1 day and 23 hours ago</span>
 
    var a_ = $(a.getData(field)).attr('date');
 
    var b_ = $(b.getData(field)).attr('date');
 

	
 
    return YAHOO.util.Sort.compare(a_, b_, desc);
 
};
 

	
 
var lastLoginSort = function(a, b, desc, field) {
 
    var a_ = parseFloat(a.getData('last_login_raw') || 0);
 
    var b_ = parseFloat(b.getData('last_login_raw') || 0);
 

	
 
    return YAHOO.util.Sort.compare(a_, b_, desc);
 
};
 

	
 
var nameSort = function(a, b, desc, field) {
 
    var a_ = a.getData('raw_name') || 0;
 
    var b_ = b.getData('raw_name') || 0;
 

	
 
    return YAHOO.util.Sort.compare(a_, b_, desc);
 
};
 

	
 
var dateSort = function(a, b, desc, field) {
 
    var a_ = parseFloat(a.getData('raw_date') || 0);
 
    var b_ = parseFloat(b.getData('raw_date') || 0);
 

	
 
    return YAHOO.util.Sort.compare(a_, b_, desc);
 
};
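// Hypothetical column definition (key and label are illustrative) showing how
// one of the sorters above can be attached to a YUI DataTable column via
// sortOptions.sortFunction:
// {key: 'last_changeset', label: 'Tip', sortable: true,
//  sortOptions: {sortFunction: revisionSort}}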
 

	
 
var addPermAction = function(_html, users_list, groups_list){
 
    var $last_node = $('.last_new_member').last(); // empty tr between last and add
 
    var next_id = $('.new_members').length;
 
    $last_node.before($('<tr class="new_members">').append(_html.format(next_id)));
 
    MembersAutoComplete($("#perm_new_member_name_"+next_id),
 
            $("#perm_container_"+next_id), users_list, groups_list);
 
}
 

	
 
function ajaxActionRevokePermission(url, obj_id, obj_type, field_id, extra_data) {
 
    var success = function (o) {
 
            $('#' + field_id).remove();
 
        };
 
    var failure = function (o) {
 
            alert(_TM['Failed to revoke permission'] + ": " + o.status);
 
        };
 
    var query_params = {
 
        '_method': 'delete'
 
    }
 
    // put extra data into POST
 
    if (extra_data !== undefined && (typeof extra_data === 'object')){
 
        for(var k in extra_data){
 
            query_params[k] = extra_data[k];
 
        }
 
    }
 

	
 
    if (obj_type=='user'){
 
        query_params['user_id'] = obj_id;
 
        query_params['obj_type'] = 'user';
 
    }
 
    else if (obj_type=='user_group'){
 
        query_params['user_group_id'] = obj_id;
 
        query_params['obj_type'] = 'user_group';
 
    }
 

	
 
    ajaxPOST(url, query_params, success, failure);
 
};
 

	
 
/* Multi selectors */
 

	
 
var MultiSelectWidget = function(selected_id, available_id, form_id){
 
    var $availableselect = $('#' + available_id);
 
    var $selectedselect = $('#' + selected_id);
 

	
 
    //fill available only with those not in selected
 
    var $selectedoptions = $selectedselect.children('option');
 
    $availableselect.children('option').filter(function(i, e){
 
            for(var j = 0, node; node = $selectedoptions[j]; j++){
 
                if(node.value == e.value){
 
                    return true;
 
                }
 
            }
 
            return false;
 
        }).remove();
 

	
 
    $('#add_element').click(function(e){
 
            $selectedselect.append($availableselect.children('option:selected'));
 
        });
 
    $('#remove_element').click(function(e){
 
            $availableselect.append($selectedselect.children('option:selected'));
 
        });
 

	
 
    $('#'+form_id).submit(function(){
 
            $selectedselect.children('option').each(function(i, e){
 
                e.selected = 'selected';
 
            });
 
        });
 
}
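// Hypothetical wiring (the element ids are illustrative): given two
// <select multiple> elements and the form that contains them,
// MultiSelectWidget('members_selected', 'members_available', 'edit_form');
// moves options between the lists via the hard-coded #add_element and
// #remove_element buttons and marks everything in the "selected" list as
// selected when the form is submitted.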
 

	
 
// custom paginator
 
var YUI_paginator = function(links_per_page, containers){
 

	
 
    (function () {
 

	
 
        var Paginator = YAHOO.widget.Paginator,
 
            l         = YAHOO.lang,
 
            setId     = YAHOO.util.Dom.generateId;
 

	
 
        Paginator.ui.MyFirstPageLink = function (p) {
 
            this.paginator = p;
 

	
 
            p.subscribe('recordOffsetChange',this.update,this,true);
 
            p.subscribe('rowsPerPageChange',this.update,this,true);
 
            p.subscribe('totalRecordsChange',this.update,this,true);
 
            p.subscribe('destroy',this.destroy,this,true);
 

	
 
            // TODO: make this work
 
            p.subscribe('firstPageLinkLabelChange',this.update,this,true);
 
            p.subscribe('firstPageLinkClassChange',this.update,this,true);
 
        };
 

	
 
        Paginator.ui.MyFirstPageLink.init = function (p) {
 
            p.setAttributeConfig('firstPageLinkLabel', {
 
                value : 1,
 
                validator : l.isString
 
            });
 
            p.setAttributeConfig('firstPageLinkClass', {
 
                value : 'yui-pg-first',
 
                validator : l.isString
 
            });
 
            p.setAttributeConfig('firstPageLinkTitle', {
 
                value : 'First Page',
 
                validator : l.isString
 
            });
 
        };
 

	
 
        // Instance members and methods
 
        Paginator.ui.MyFirstPageLink.prototype = {
 
            current   : null,
 
            leftmost_page: null,
 
            rightmost_page: null,
 
            link      : null,
 
            span      : null,
 
            dotdot    : null,
 
            getPos    : function(cur_page, max_page, items){
 
                var edge = parseInt(items / 2) + 1;
 
                if (cur_page <= edge){
 
                    var radius = Math.max(parseInt(items / 2), items - cur_page);
 
                }
 
                else if ((max_page - cur_page) < edge) {
 
                    var radius = (items - 1) - (max_page - cur_page);
 
                }
 
                else{
 
                    var radius = parseInt(items / 2);
 
                }
 

	
 
                var left = Math.max(1, (cur_page - (radius)));
 
                var right = Math.min(max_page, cur_page + (radius));
 
                return [left, cur_page, right]
 
            },
 
            render : function (id_base) {
 
                var p      = this.paginator,
 
                    c      = p.get('firstPageLinkClass'),
 
                    label  = p.get('firstPageLinkLabel'),
 
                    title  = p.get('firstPageLinkTitle');
 

	
 
                this.link     = document.createElement('a');
 
                this.span     = document.createElement('span');
 
                $(this.span).hide();
 

	
 
                var _pos = this.getPos(p.getCurrentPage(), p.getTotalPages(), 5);
 
                this.leftmost_page = _pos[0];
 
                this.rightmost_page = _pos[2];
 

	
 
                setId(this.link, id_base + '-first-link');
 
                this.link.href      = '#';
 
                this.link.className = c;
 
                this.link.innerHTML = label;
 
                this.link.title     = title;
 
                YUE.on(this.link,'click',this.onClick,this,true);
 

	
 
                setId(this.span, id_base + '-first-span');
 
                this.span.className = c;
 
                this.span.innerHTML = label;
 

	
 
                this.current = p.getCurrentPage() > 1 ? this.link : this.span;
 
                return this.current;
 
            },
 
            update : function (e) {
 
                var p      = this.paginator;
 
                var _pos   = this.getPos(p.getCurrentPage(), p.getTotalPages(), 5);
 
                this.leftmost_page = _pos[0];
 
                this.rightmost_page = _pos[2];
 

	
 
                if (e && e.prevValue === e.newValue) {
 
                    return;
 
                }
 

	
 
                var par = this.current ? this.current.parentNode : null;
 
                if (this.leftmost_page > 1) {
 
                    if (par && this.current === this.span) {
 
                        par.replaceChild(this.link,this.current);
 
                        this.current = this.link;
 
                    }
 
                } else {
 
                    if (par && this.current === this.link) {
 
                        par.replaceChild(this.span,this.current);
 
                        this.current = this.span;
 
                    }
 
                }
 
            },
 
            destroy : function () {
 
                YUE.purgeElement(this.link);
 
                this.current.parentNode.removeChild(this.current);
 
                this.link = this.span = null;
 
            },
 
            onClick : function (e) {
 
                YUE.stopEvent(e);
 
                this.paginator.setPage(1);
 
            }
 
        };
 

	
 
        })();
 

	
 
    (function () {
 

	
 
        var Paginator = YAHOO.widget.Paginator,
 
            l         = YAHOO.lang,
 
            setId     = YAHOO.util.Dom.generateId;
 

	
 
        Paginator.ui.MyLastPageLink = function (p) {
 
            this.paginator = p;
 

	
 
            p.subscribe('recordOffsetChange',this.update,this,true);
 
            p.subscribe('rowsPerPageChange',this.update,this,true);
 
            p.subscribe('totalRecordsChange',this.update,this,true);
 
            p.subscribe('destroy',this.destroy,this,true);
 

	
 
            // TODO: make this work
 
            p.subscribe('lastPageLinkLabelChange',this.update,this,true);
 
            p.subscribe('lastPageLinkClassChange', this.update,this,true);
 
        };
 

	
 
        Paginator.ui.MyLastPageLink.init = function (p) {
 
            p.setAttributeConfig('lastPageLinkLabel', {
 
                value : -1,
 
                validator : l.isString
 
            });
 
            p.setAttributeConfig('lastPageLinkClass', {
 
                value : 'yui-pg-last',
 
                validator : l.isString
 
            });
 
            p.setAttributeConfig('lastPageLinkTitle', {
 
                value : 'Last Page',
 
                validator : l.isString
 
            });
 

	
 
        };
 

	
 
        Paginator.ui.MyLastPageLink.prototype = {
 

	
 
            current   : null,
 
            leftmost_page: null,
 
            rightmost_page: null,
 
            link      : null,
 
            span      : null,
 
            dotdot    : null,
 
            na        : null,
 
            getPos    : function(cur_page, max_page, items){
 
                var edge = parseInt(items / 2) + 1;
 
                if (cur_page <= edge){
 
                    var radius = Math.max(parseInt(items / 2), items - cur_page);
 
                }
 
                else if ((max_page - cur_page) < edge) {
 
                    var radius = (items - 1) - (max_page - cur_page);
 
                }
 
                else{
 
                    var radius = parseInt(items / 2);
 
                }
 

	
 
                var left = Math.max(1, (cur_page - (radius)));
 
                var right = Math.min(max_page, cur_page + (radius));
 
                return [left, cur_page, right]
 
            },
 
            render : function (id_base) {
 
                var p      = this.paginator,
 
                    c      = p.get('lastPageLinkClass'),
 
                    label  = p.get('lastPageLinkLabel'),
 
                    last   = p.getTotalPages(),
 
                    title  = p.get('lastPageLinkTitle');
 

	
 
                var _pos = this.getPos(p.getCurrentPage(), p.getTotalPages(), 5);
 
                this.leftmost_page = _pos[0];
 
                this.rightmost_page = _pos[2];
 

	
 
                this.link = document.createElement('a');
 
                this.span = document.createElement('span');
 
                $(this.span).hide();
 

	
 
                this.na   = this.span.cloneNode(false);
 

	
 
                setId(this.link, id_base + '-last-link');
 
                this.link.href      = '#';
 
                this.link.className = c;
 
                this.link.innerHTML = label;
 
                this.link.title     = title;
 
                YUE.on(this.link,'click',this.onClick,this,true);
 

	
 
                setId(this.span, id_base + '-last-span');
 
                this.span.className = c;
 
                this.span.innerHTML = label;
 

	
 
                setId(this.na, id_base + '-last-na');
 

	
 
                if (this.rightmost_page < p.getTotalPages()){
 
                    this.current = this.link;
 
                }
 
                else{
 
                    this.current = this.span;
 
                }
 

	
 
                this.current.innerHTML = p.getTotalPages();
 
                return this.current;
 
            },
 

	
 
            update : function (e) {
 
                var p      = this.paginator;
 

	
 
                var _pos = this.getPos(p.getCurrentPage(), p.getTotalPages(), 5);
 
                this.leftmost_page = _pos[0];
 
                this.rightmost_page = _pos[2];
 

	
 
                if (e && e.prevValue === e.newValue) {
 
                    return;
 
                }
 

	
 
                var par   = this.current ? this.current.parentNode : null,
 
                    after = this.link;
 
                if (par) {
 

	
 
                    // only show the last page if the rightmost one is
 
                    // lower, so we don't have doubled entries at the end
 
                    if (!(this.rightmost_page < p.getTotalPages())){
 
                        after = this.span
 
                    }
 

	
 
                    if (this.current !== after) {
 
                        par.replaceChild(after,this.current);
 
                        this.current = after;
 
                    }
 
                }
 
                this.current.innerHTML = this.paginator.getTotalPages();
 

	
 
            },
 
            destroy : function () {
 
                YUE.purgeElement(this.link);
 
                this.current.parentNode.removeChild(this.current);
 
                this.link = this.span = null;
 
            },
 
            onClick : function (e) {
 
                YUE.stopEvent(e);
 
                this.paginator.setPage(this.paginator.getTotalPages());
 
            }
 
        };
 

	
 
        })();
 

	
 
    var pagi = new YAHOO.widget.Paginator({
 
        rowsPerPage: links_per_page,
 
        alwaysVisible: false,
 
        template : "{PreviousPageLink} {MyFirstPageLink} {PageLinks} {MyLastPageLink} {NextPageLink}",
 
        pageLinks: 5,
 
        containerClass: 'pagination-wh',
 
        currentPageClass: 'pager_curpage',
 
        pageLinkClass: 'pager_link',
 
        nextPageLinkLabel: '&gt;',
 
        previousPageLinkLabel: '&lt;',
 
        containers:containers
 
    });
 

	
 
    return pagi
 
}
 

	
 
var YUI_datatable = function(data, fields, columns, countnode, sortkey, rows){
 
    var myDataSource = new YAHOO.util.DataSource(data);
 
    myDataSource.responseType = YAHOO.util.DataSource.TYPE_JSON;
 
    myDataSource.responseSchema = {
 
        resultsList: "records",
 
        fields: fields
 
        };
 
    myDataSource.doBeforeCallback = function(req, raw, res, cb) {
 
        // This is the filter function
 
        var data     = res.results || [],
 
            filtered = [],
 
            i, l;
 

	
 
        if (req) {
 
            req = req.toLowerCase();
 
            for (i = 0; i<data.length; i++) {
 
                var pos = data[i].raw_name.toLowerCase().indexOf(req);
 
                if (pos != -1) {
 
                    filtered.push(data[i]);
 
                }
 
            }
 
            res.results = filtered;
 
        }
 
        $(countnode).html(res.results.length);
 
        return res;
 
    }
 

	
 
    var myDataTable = new YAHOO.widget.DataTable("datatable_list_wrap", columns, myDataSource, {
 
        sortedBy: {key:sortkey, dir:"asc"},
 
        paginator: YUI_paginator(rows !== undefined && rows ? rows : 25, ['user-paginator']),
 
        MSG_SORTASC: _TM['MSG_SORTASC'],
 
        MSG_SORTDESC: _TM['MSG_SORTDESC'],
 
        MSG_EMPTY: _TM['MSG_EMPTY'],
 
        MSG_ERROR: _TM['MSG_ERROR'],
 
        MSG_LOADING: _TM['MSG_LOADING']
 
        });
 
    myDataTable.subscribe('postRenderEvent',function(oArgs) {
 
        tooltip_activate();
 
        quick_repo_menu();
 
        });
 

	
 
    var filterTimeout = null;
 
    var $q_filter = $('#q_filter');
 

	
 
    var updateFilter = function () {
 
        // Reset timeout
 
        filterTimeout = null;
 

	
 
        // Reset sort
 
        var state = myDataTable.getState();
 
        state.sortedBy = {key:sortkey, dir:YAHOO.widget.DataTable.CLASS_ASC};
 

	
 
        // Get filtered data
 
        myDataSource.sendRequest($q_filter.val(), {
 
            success : myDataTable.onDataReturnInitializeTable,
 
            failure : myDataTable.onDataReturnInitializeTable,
 
            scope   : myDataTable,
 
            argument: state});
 
        };
 

	
 
    $q_filter.click(function(){
 
            if(!$q_filter.hasClass('loaded')){
 
                //TODO: later, load the full list here to allow searching within groups
 
                $q_filter.addClass('loaded');
 
            }
 
        });
 

	
 
    $q_filter.keyup(function (e) {
 
            clearTimeout(filterTimeout);
 
            filterTimeout = setTimeout(updateFilter, 600);
 
        });
 
}
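// Hypothetical usage sketch (field and column names are illustrative; the
// function itself expects a #datatable_list_wrap element, a #q_filter input
// and a 'user-paginator' container to already exist in the page):
// YUI_datatable({records: [{raw_name: 'repo1', name: 'repo1'}]},
//     ['raw_name', 'name'],
//     [{key: 'name', label: 'Name', sortable: true}],
//     '#repo_count', 'name', 25);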
 

	
 
/**
 
 Branch sorting callback for select2, modifying the filtered results so that
 
 prefix matches come before matches elsewhere in the branch name.
 
 **/
 
var branchSort = function(results, container, query) {
 
    if (query.term) {
 
        return results.sort(function (a, b) {
 
            // Put closed branches after open ones (a bit of a hack ...)
 
            var aClosed = a.text.indexOf("(closed)") > -1,
 
                bClosed = b.text.indexOf("(closed)") > -1;
 
            if (aClosed && !bClosed) {
 
                return 1;
 
            }
 
            if (bClosed && !aClosed) {
 
                return -1;
 
            }
 

	
 
            // Put prefix matches before matches elsewhere in the name
 
            var aPos = a.text.indexOf(query.term),
 
                bPos = b.text.indexOf(query.term);
 
            if (aPos === 0 && bPos !== 0) {
 
                return -1;
 
            }
 
            if (bPos === 0 && aPos !== 0) {
 
                return 1;
 
            }
 

	
 
            // Default sorting
 
            if (a.text > b.text) {
 
                return 1;
 
            }
 
            if (a.text < b.text) {
 
                return -1;
 
            }
 
            return 0;
 
        });
 
    }
 
    return results;
 
};
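// Hypothetical wiring (the #branch selector is illustrative): select2 3.x
// accepts a sortResults callback with this (results, container, query)
// signature, so the sorter above could be attached like:
// $('#branch').select2({sortResults: branchSort});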
 

	
 
// global hooks after DOM is loaded
 

	
 
$(document).ready(function(){
 
    $('.diff-collapse-button').click(function(e) {
 
        var $button = $(e.currentTarget);
 
        var $target = $('#' + $button.prop('target'));
 
        if($target.hasClass('hidden')){
 
            $target.removeClass('hidden');
 
            $button.html("&uarr; {0} &uarr;".format(_TM['Collapse Diff']));
 
        }
 
        else if(!$target.hasClass('hidden')){
 
            $target.addClass('hidden');
 
            $button.html("&darr; {0} &darr;".format(_TM['Expand Diff']));
 
        }
 
    });
 
});