Changeset 431689d7f37d (branch: default, not reviewed)
Søren Løvborg <sorenl@unity3d.com>, 2015-08-31 17:42:57

remove vestiges of Python 2.5 support

We only support Python 2.6 and 2.7; hence we do not need to import
with-statement support from __future__.
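
For reference, the `with` statement is a core language feature from Python 2.6
onwards; only Python 2.5 required the future import. A minimal sketch (file
name hypothetical):

    # Runs unchanged on Python 2.6 and 2.7, no __future__ import needed.
    # Only Python 2.5 required: from __future__ import with_statement
    with open('example.txt') as f:
        data = f.read()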
41 files changed with 0 insertions and 41 deletions:
kallithea/bin/kallithea_api.py
# -*- coding: utf-8 -*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
"""
kallithea.bin.kallithea_api
~~~~~~~~~~~~~~~~~~~~~~~~~~~

Api CLI client for Kallithea

This file was forked by the Kallithea project in July 2014.
Original author and date, and relevant copyright and licensing information is below:
:created_on: Jun 3, 2012
:author: marcink
:copyright: (c) 2013 RhodeCode GmbH, and others.
:license: GPLv3, see LICENSE.md for more details.
"""

from __future__ import with_statement
import sys
import argparse

from kallithea.bin.base import json, api_call, RcConf, FORMAT_JSON, FORMAT_PRETTY


def argparser(argv):
    usage = (
      "kallithea-api [-h] [--format=FORMAT] [--apikey=APIKEY] [--apihost=APIHOST] "
      "[--config=CONFIG] [--save-config] "
      "METHOD <key:val> <key2:val> ...\n"
      "Create config file: kallithea-api --apikey=<key> --apihost=http://your.kallithea.server --save-config"
    )

    parser = argparse.ArgumentParser(description='Kallithea API cli',
                                     usage=usage)

    ## config
    group = parser.add_argument_group('config')
    group.add_argument('--apikey', help='api access key')
    group.add_argument('--apihost', help='api host')
    group.add_argument('--config', help='config file')
    group.add_argument('--save-config', action='store_true', help='save the given config into a file')

    group = parser.add_argument_group('API')
    group.add_argument('method', metavar='METHOD', nargs='?', type=str, default=None,
            help='API method name to call followed by key:value attributes',
    )
    group.add_argument('--format', dest='format', type=str,
            help='output format default: `%s` can '
                 'be also `%s`' % (FORMAT_PRETTY, FORMAT_JSON),
            default=FORMAT_PRETTY
    )
    args, other = parser.parse_known_args()
    return parser, args, other


def main(argv=None):
    """
    Main execution function for cli

    :param argv:
    """
    if argv is None:
        argv = sys.argv

    conf = None
    parser, args, other = argparser(argv)

    api_credentials_given = (args.apikey and args.apihost)
    if args.save_config:
        if not api_credentials_given:
            raise parser.error('--save-config requires --apikey and --apihost')
        conf = RcConf(config_location=args.config,
                      autocreate=True, config={'apikey': args.apikey,
                                               'apihost': args.apihost})
        sys.exit()

    if not conf:
        conf = RcConf(config_location=args.config, autoload=True)
        if not conf:
            if not api_credentials_given:
                parser.error('Could not find config file and missing '
                             '--apikey or --apihost in params')

    apikey = args.apikey or conf['apikey']
    apihost = args.apihost or conf['apihost']
    method = args.method

    # if we don't have method here it's an error
    if not method:
        parser.error('Please specify method name')

    try:
        margs = dict(map(lambda s: s.split(':', 1), other))
    except ValueError:
        sys.stderr.write('Error parsing arguments \n')
        sys.exit()
    if args.format == FORMAT_PRETTY:
        print 'Calling method %s => %s' % (method, apihost)

    json_resp = api_call(apikey, apihost, method, **margs)
    error_prefix = ''
    if json_resp['error']:
        error_prefix = 'ERROR:'
        json_data = json_resp['error']
    else:
        json_data = json_resp['result']
    if args.format == FORMAT_JSON:
        print json.dumps(json_data)
    elif args.format == FORMAT_PRETTY:
        print 'Server response \n%s%s' % (
            error_prefix, json.dumps(json_data, indent=4, sort_keys=True)
        )
    return 0

if __name__ == '__main__':
    sys.exit(main(sys.argv))
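
For context, the `<key:val>` arguments accepted above are folded into the
`margs` dict by the `split(':', 1)` mapping in main(); only the first colon
splits, so values may themselves contain colons. A quick illustration
(interpreter session, values made up):

    >>> dict(map(lambda s: s.split(':', 1), ['url:http://example.com/repo']))
    {'url': 'http://example.com/repo'}

An argument without any colon yields a one-element list, so dict() raises
ValueError; that is the case the try/except in main() reports as a parse error.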
kallithea/bin/kallithea_config.py
#!/usr/bin/env python2

# -*- coding: utf-8 -*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
"""
kallithea.bin.kallithea_config
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

configuration generator for Kallithea

This file was forked by the Kallithea project in July 2014.
Original author and date, and relevant copyright and licensing information is below:
:created_on: Jun 18, 2013
:author: marcink
:copyright: (c) 2013 RhodeCode GmbH, and others.
:license: GPLv3, see LICENSE.md for more details.
"""


from __future__ import with_statement
import os
import sys
import uuid
import argparse
from mako.template import Template
TMPL = 'template.ini.mako'
here = os.path.dirname(os.path.abspath(__file__))

def argparser(argv):
    usage = (
      "kallithea-config [-h] [--filename=FILENAME] [--template=TEMPLATE] \n"
      "VARS optional specify extra template variable that will be available in "
      "template. Use comma separated key=val format eg.\n"
      "key1=val1,port=5000,host=127.0.0.1,elements='a\,b\,c'\n"
    )

    parser = argparse.ArgumentParser(
        description='Kallithea CONFIG generator with variable replacement',
        usage=usage
    )

    ## config
    group = parser.add_argument_group('CONFIG')
    group.add_argument('--filename', help='Output ini filename.')
    group.add_argument('--template', help='Mako template file to use instead of '
                                          'the default builtin template')
    group.add_argument('--raw', help='Store given mako template as raw without '
                                     'parsing. Use this to create custom template '
                                     'initially', action='store_true')
    group.add_argument('--show-defaults', help='Show all default variables for '
                                               'builtin template', action='store_true')
    args, other = parser.parse_known_args()
    return parser, args, other


def _escape_split(text, sep):
    """
    Allows for escaping of the separator: e.g. arg='foo\, bar'

    It should be noted that the way bash et al. do command line parsing, those
    single quotes are required. A shameless ripoff from the fabric project.

    """
    escaped_sep = r'\%s' % sep

    if escaped_sep not in text:
        return text.split(sep)

    before, _, after = text.partition(escaped_sep)
    startlist = before.split(sep)  # a regular split is fine here
    unfinished = startlist[-1]
    startlist = startlist[:-1]

    # recurse because there may be more escaped separators
    endlist = _escape_split(after, sep)

    # finish building the escaped value. we use endlist[0] because the first
    # part of the string sent in recursion is the rest of the escaped value.
    unfinished += sep + endlist[0]

    return startlist + [unfinished] + endlist[1:]  # put together all the parts

def _run(argv):
    parser, args, other = argparser(argv)
    if not len(sys.argv) > 1:
        print parser.print_help()
        sys.exit(0)
    # defaults that can be overwritten by arguments
    tmpl_stored_args = {
        'http_server': 'waitress',
        'lang': 'en',
        'database_engine': 'sqlite',
        'host': '127.0.0.1',
        'port': 5000,
        'error_aggregation_service': None,
    }
    if other:
        # parse arguments, we assume only first is correct
        kwargs = {}
        for el in _escape_split(other[0], ','):
            kv = _escape_split(el, '=')
            if len(kv) == 2:
                k, v = kv
                kwargs[k] = v
        # update our template stored args
        tmpl_stored_args.update(kwargs)

    # use default that cannot be replaced
    tmpl_stored_args.update({
        'uuid': lambda: uuid.uuid4().hex,
        'here': os.path.abspath(os.curdir),
    })
    if args.show_defaults:
        for k,v in tmpl_stored_args.iteritems():
            print '%s=%s' % (k, v)
        sys.exit(0)
    try:
        # built in template
        tmpl_file = os.path.join(here, TMPL)
        if args.template:
            tmpl_file = args.template

        with open(tmpl_file, 'rb') as f:
            tmpl_data = f.read().decode('utf-8')
            if args.raw:
                tmpl = tmpl_data
            else:
                tmpl = Template(tmpl_data).render(**tmpl_stored_args)
        with open(args.filename, 'wb') as f:
            f.write(tmpl.encode('utf-8'))
        print 'Wrote new config file in %s' % (os.path.abspath(args.filename))

    except Exception:
        from mako import exceptions
        print exceptions.text_error_template().render()

def main(argv=None):
    """
    Main execution function for cli

    :param argv:
    """
    if argv is None:
        argv = sys.argv

    return _run(argv)


if __name__ == '__main__':
    sys.exit(main(sys.argv))
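
The _escape_split helper above is what allows a template variable value to
contain the separator itself, matching the elements='a\,b\,c' hint in the
usage string. Its behavior, sketched as an interpreter session:

    >>> _escape_split(r'key1=val1,elements=a\,b\,c', ',')
    ['key1=val1', 'elements=a,b,c']

Each resulting element is then split once more on '=' to build the kwargs
passed to the Mako template.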
kallithea/bin/kallithea_gist.py
# -*- coding: utf-8 -*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
"""
kallithea.bin.kallithea_gist
~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Gist CLI client for Kallithea

This file was forked by the Kallithea project in July 2014.
Original author and date, and relevant copyright and licensing information is below:
:created_on: May 9, 2013
:author: marcink
:copyright: (c) 2013 RhodeCode GmbH, and others.
:license: GPLv3, see LICENSE.md for more details.
"""

from __future__ import with_statement
import os
import sys
import stat
import argparse
import fileinput

from kallithea.bin.base import json, api_call, RcConf, FORMAT_JSON, FORMAT_PRETTY


def argparser(argv):
    usage = (
      "kallithea-gist [-h] [--format=FORMAT] [--apikey=APIKEY] [--apihost=APIHOST] "
      "[--config=CONFIG] [--save-config] [GIST OPTIONS] "
      "[filename or stdin use - for terminal stdin ]\n"
      "Create config file: kallithea-gist --apikey=<key> --apihost=http://your.kallithea.server --save-config"
    )

    parser = argparse.ArgumentParser(description='Kallithea Gist cli',
                                     usage=usage)

    ## config
    group = parser.add_argument_group('config')
    group.add_argument('--apikey', help='api access key')
    group.add_argument('--apihost', help='api host')
    group.add_argument('--config', help='config file path DEFAULT: ~/.config/kallithea')
    group.add_argument('--save-config', action='store_true',
                       help='save the given config into a file')

    group = parser.add_argument_group('GIST')
    group.add_argument('-p', '--private', action='store_true',
                       help='create private Gist')
    group.add_argument('-f', '--filename',
                       help='set uploaded gist filename, '
                            'also defines syntax highlighting')
    group.add_argument('-d', '--description', help='Gist description')
    group.add_argument('-l', '--lifetime', metavar='MINUTES',
                       help='gist lifetime in minutes, -1 (DEFAULT) is forever')
    group.add_argument('--format', dest='format', type=str,
                       help='output format DEFAULT: `%s` can '
                       'be also `%s`' % (FORMAT_PRETTY, FORMAT_JSON),
            default=FORMAT_PRETTY
    )
    args, other = parser.parse_known_args()
    return parser, args, other


def _run(argv):
    conf = None
    parser, args, other = argparser(argv)

    api_credentials_given = (args.apikey and args.apihost)
    if args.save_config:
        if not api_credentials_given:
            raise parser.error('--save-config requires --apikey and --apihost')
        conf = RcConf(config_location=args.config,
                      autocreate=True, config={'apikey': args.apikey,
                                               'apihost': args.apihost})
        sys.exit()

    if not conf:
        conf = RcConf(config_location=args.config, autoload=True)
        if not conf:
            if not api_credentials_given:
                parser.error('Could not find config file and missing '
                             '--apikey or --apihost in params')

    apikey = args.apikey or conf['apikey']
    host = args.apihost or conf['apihost']
    DEFAULT_FILENAME = 'gistfile1.txt'
    if other:
        # skip multifiles for now
        filename = other[0]
        if filename == '-':
            filename = DEFAULT_FILENAME
            gist_content = ''
            for line in fileinput.input('-'):
                gist_content += line
        else:
            with open(filename, 'rb') as f:
                gist_content = f.read()

    else:
        filename = DEFAULT_FILENAME
        gist_content = None
        # little bit hacky but cross platform check where the
        # stdin comes from we skip the terminal case it can be handled by '-'
        mode = os.fstat(0).st_mode
        if stat.S_ISFIFO(mode):
            # "stdin is piped"
            gist_content = sys.stdin.read()
        elif stat.S_ISREG(mode):
            # "stdin is redirected"
            gist_content = sys.stdin.read()
        else:
            # "stdin is terminal"
            pass

    # make sure we don't upload binary stuff
    if gist_content and '\0' in gist_content:
        raise Exception('Error: binary files upload is not possible')

    filename = os.path.basename(args.filename or filename)
    if gist_content:
        files = {
            filename: {
                'content': gist_content,
                'lexer': None
            }
        }

        margs = dict(
            lifetime=args.lifetime,
            description=args.description,
            gist_type='private' if args.private else 'public',
            files=files
        )

        json_data = api_call(apikey, host, 'create_gist', **margs)['result']
        if args.format == FORMAT_JSON:
            print json.dumps(json_data)
        elif args.format == FORMAT_PRETTY:
            print json_data
            print 'Created %s gist %s' % (json_data['gist']['type'],
                                          json_data['gist']['url'])
    return 0


def main(argv=None):
    """
    Main execution function for cli

    :param argv:
    """
    if argv is None:
        argv = sys.argv

    try:
        return _run(argv)
    except Exception as e:
        print e
        return 1


if __name__ == '__main__':
    sys.exit(main(sys.argv))
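
The stdin handling in _run above distinguishes piped, redirected, and
interactive input by inspecting file descriptor 0. The same probe in
isolation (Python 2, standalone sketch):

    import os
    import stat

    mode = os.fstat(0).st_mode       # fd 0 is stdin
    if stat.S_ISFIFO(mode):
        print 'stdin is piped'       # e.g. `echo hi | kallithea-gist`
    elif stat.S_ISREG(mode):
        print 'stdin is redirected'  # e.g. `kallithea-gist < notes.txt`
    else:
        print 'stdin is a terminal'  # interactive; the script skips reading (use '-' to force)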
kallithea/config/routing.py
# -*- coding: utf-8 -*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
"""
 
Routes configuration
 

	
 
The more specific and detailed routes should be defined first so they
 
may take precedent over the more generic routes. For more information
 
refer to the routes manual at http://routes.groovie.org/docs/
 
"""
 

	
 
from __future__ import with_statement
from routes import Mapper

# prefix for non repository related links needs to be prefixed with `/`
ADMIN_PREFIX = '/_admin'


def make_map(config):
    """Create, configure and return the routes Mapper"""
    rmap = Mapper(directory=config['pylons.paths']['controllers'],
                  always_scan=config['debug'])
    rmap.minimization = False
    rmap.explicit = False

    from kallithea.lib.utils import (is_valid_repo, is_valid_repo_group,
                                     get_repo_by_id)

    def check_repo(environ, match_dict):
        """
        check for valid repository for proper 404 handling

        :param environ:
        :param match_dict:
        """
        repo_name = match_dict.get('repo_name')

        if match_dict.get('f_path'):
            #fix for multiple initial slashes that causes errors
            match_dict['f_path'] = match_dict['f_path'].lstrip('/')

        by_id_match = get_repo_by_id(repo_name)
        if by_id_match:
            repo_name = by_id_match
            match_dict['repo_name'] = repo_name

        return is_valid_repo(repo_name, config['base_path'])

    def check_group(environ, match_dict):
        """
        check for valid repository group for proper 404 handling

        :param environ:
        :param match_dict:
        """
        repo_group_name = match_dict.get('group_name')
        return is_valid_repo_group(repo_group_name, config['base_path'])

    def check_group_skip_path(environ, match_dict):
        """
        check for valid repository group for proper 404 handling, but skips
        verification of existing path

        :param environ:
        :param match_dict:
        """
        repo_group_name = match_dict.get('group_name')
        return is_valid_repo_group(repo_group_name, config['base_path'],
                                   skip_path_check=True)

    def check_user_group(environ, match_dict):
        """
        check for valid user group for proper 404 handling

        :param environ:
        :param match_dict:
        """
        return True

    def check_int(environ, match_dict):
        return match_dict.get('id').isdigit()

    # The ErrorController route (handles 404/500 error pages); it should
    # likely stay at the top, ensuring it can always be resolved
    rmap.connect('/error/{action}', controller='error')
    rmap.connect('/error/{action}/{id}', controller='error')

    #==========================================================================
    # CUSTOM ROUTES HERE
    #==========================================================================

    #MAIN PAGE
    rmap.connect('home', '/', controller='home', action='index')
    rmap.connect('about', '/about', controller='home', action='about')
    rmap.connect('repo_switcher_data', '/_repos', controller='home',
                 action='repo_switcher_data')

    rmap.connect('rst_help',
                 "http://docutils.sourceforge.net/docs/user/rst/quickref.html",
                 _static=True)
    rmap.connect('kallithea_project_url', "https://kallithea-scm.org/", _static=True)
    rmap.connect('issues_url', 'https://bitbucket.org/conservancy/kallithea/issues', _static=True)

    #ADMIN REPOSITORY ROUTES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/repos') as m:
        m.connect("repos", "/repos",
                  action="create", conditions=dict(method=["POST"]))
        m.connect("repos", "/repos",
                  action="index", conditions=dict(method=["GET"]))
        m.connect("new_repo", "/create_repository",
                  action="create_repository", conditions=dict(method=["GET"]))
        m.connect("put_repo", "/repos/{repo_name:.*?}",
                  action="update", conditions=dict(method=["PUT"],
                  function=check_repo))
        m.connect("delete_repo", "/repos/{repo_name:.*?}",
                  action="delete", conditions=dict(method=["DELETE"],
                  ))

    #ADMIN REPOSITORY GROUPS ROUTES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/repo_groups') as m:
        m.connect("repos_groups", "/repo_groups",
                  action="create", conditions=dict(method=["POST"]))
        m.connect("repos_groups", "/repo_groups",
                  action="index", conditions=dict(method=["GET"]))
        m.connect("new_repos_group", "/repo_groups/new",
                  action="new", conditions=dict(method=["GET"]))
        m.connect("update_repos_group", "/repo_groups/{group_name:.*?}",
                  action="update", conditions=dict(method=["PUT"],
                                                   function=check_group))

        m.connect("repos_group", "/repo_groups/{group_name:.*?}",
                  action="show", conditions=dict(method=["GET"],
                                                 function=check_group))

        #EXTRAS REPO GROUP ROUTES
        m.connect("edit_repo_group", "/repo_groups/{group_name:.*?}/edit",
                  action="edit",
                  conditions=dict(method=["GET"], function=check_group))
        m.connect("edit_repo_group", "/repo_groups/{group_name:.*?}/edit",
                  action="edit",
                  conditions=dict(method=["PUT"], function=check_group))

        m.connect("edit_repo_group_advanced", "/repo_groups/{group_name:.*?}/edit/advanced",
                  action="edit_repo_group_advanced",
                  conditions=dict(method=["GET"], function=check_group))
        m.connect("edit_repo_group_advanced", "/repo_groups/{group_name:.*?}/edit/advanced",
                  action="edit_repo_group_advanced",
                  conditions=dict(method=["PUT"], function=check_group))

        m.connect("edit_repo_group_perms", "/repo_groups/{group_name:.*?}/edit/permissions",
                  action="edit_repo_group_perms",
                  conditions=dict(method=["GET"], function=check_group))
        m.connect("edit_repo_group_perms", "/repo_groups/{group_name:.*?}/edit/permissions",
                  action="update_perms",
                  conditions=dict(method=["PUT"], function=check_group))
        m.connect("edit_repo_group_perms", "/repo_groups/{group_name:.*?}/edit/permissions",
                  action="delete_perms",
                  conditions=dict(method=["DELETE"], function=check_group))

        m.connect("delete_repo_group", "/repo_groups/{group_name:.*?}",
                  action="delete", conditions=dict(method=["DELETE"],
                                                   function=check_group_skip_path))


    #ADMIN USER ROUTES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/users') as m:
        m.connect("users", "/users",
                  action="create", conditions=dict(method=["POST"]))
        m.connect("users", "/users",
                  action="index", conditions=dict(method=["GET"]))
        m.connect("formatted_users", "/users.{format}",
                  action="index", conditions=dict(method=["GET"]))
        m.connect("new_user", "/users/new",
                  action="new", conditions=dict(method=["GET"]))
        m.connect("update_user", "/users/{id}",
                  action="update", conditions=dict(method=["PUT"]))
        m.connect("delete_user", "/users/{id}",
                  action="delete", conditions=dict(method=["DELETE"]))
        m.connect("edit_user", "/users/{id}/edit",
                  action="edit", conditions=dict(method=["GET"]))
        m.connect("user", "/users/{id}",
                  action="show", conditions=dict(method=["GET"]))

        #EXTRAS USER ROUTES
        m.connect("edit_user_advanced", "/users/{id}/edit/advanced",
                  action="edit_advanced", conditions=dict(method=["GET"]))
        m.connect("edit_user_advanced", "/users/{id}/edit/advanced",
                  action="update_advanced", conditions=dict(method=["PUT"]))

        m.connect("edit_user_api_keys", "/users/{id}/edit/api_keys",
                  action="edit_api_keys", conditions=dict(method=["GET"]))
        m.connect("edit_user_api_keys", "/users/{id}/edit/api_keys",
                  action="add_api_key", conditions=dict(method=["POST"]))
        m.connect("edit_user_api_keys", "/users/{id}/edit/api_keys",
                  action="delete_api_key", conditions=dict(method=["DELETE"]))

        m.connect("edit_user_perms", "/users/{id}/edit/permissions",
                  action="edit_perms", conditions=dict(method=["GET"]))
        m.connect("edit_user_perms", "/users/{id}/edit/permissions",
                  action="update_perms", conditions=dict(method=["PUT"]))

        m.connect("edit_user_emails", "/users/{id}/edit/emails",
                  action="edit_emails", conditions=dict(method=["GET"]))
        m.connect("edit_user_emails", "/users/{id}/edit/emails",
                  action="add_email", conditions=dict(method=["PUT"]))
        m.connect("edit_user_emails", "/users/{id}/edit/emails",
                  action="delete_email", conditions=dict(method=["DELETE"]))

        m.connect("edit_user_ips", "/users/{id}/edit/ips",
                  action="edit_ips", conditions=dict(method=["GET"]))
        m.connect("edit_user_ips", "/users/{id}/edit/ips",
                  action="add_ip", conditions=dict(method=["PUT"]))
        m.connect("edit_user_ips", "/users/{id}/edit/ips",
                  action="delete_ip", conditions=dict(method=["DELETE"]))

    #ADMIN USER GROUPS REST ROUTES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/user_groups') as m:
        m.connect("users_groups", "/user_groups",
                  action="create", conditions=dict(method=["POST"]))
        m.connect("users_groups", "/user_groups",
                  action="index", conditions=dict(method=["GET"]))
        m.connect("new_users_group", "/user_groups/new",
                  action="new", conditions=dict(method=["GET"]))
        m.connect("update_users_group", "/user_groups/{id}",
                  action="update", conditions=dict(method=["PUT"]))
        m.connect("delete_users_group", "/user_groups/{id}",
                  action="delete", conditions=dict(method=["DELETE"]))
        m.connect("edit_users_group", "/user_groups/{id}/edit",
                  action="edit", conditions=dict(method=["GET"]),
                  function=check_user_group)
        m.connect("users_group", "/user_groups/{id}",
                  action="show", conditions=dict(method=["GET"]))

        #EXTRAS USER GROUP ROUTES
        m.connect("edit_user_group_default_perms", "/user_groups/{id}/edit/default_perms",
                  action="edit_default_perms", conditions=dict(method=["GET"]))
        m.connect("edit_user_group_default_perms", "/user_groups/{id}/edit/default_perms",
                  action="update_default_perms", conditions=dict(method=["PUT"]))


        m.connect("edit_user_group_perms", "/user_groups/{id}/edit/perms",
                  action="edit_perms", conditions=dict(method=["GET"]))
        m.connect("edit_user_group_perms", "/user_groups/{id}/edit/perms",
                  action="update_perms", conditions=dict(method=["PUT"]))
        m.connect("edit_user_group_perms", "/user_groups/{id}/edit/perms",
                  action="delete_perms", conditions=dict(method=["DELETE"]))

        m.connect("edit_user_group_advanced", "/user_groups/{id}/edit/advanced",
                  action="edit_advanced", conditions=dict(method=["GET"]))

        m.connect("edit_user_group_members", "/user_groups/{id}/edit/members",
                  action="edit_members", conditions=dict(method=["GET"]))



    #ADMIN PERMISSIONS ROUTES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/permissions') as m:
        m.connect("admin_permissions", "/permissions",
                  action="permission_globals", conditions=dict(method=["POST"]))
        m.connect("admin_permissions", "/permissions",
                  action="permission_globals", conditions=dict(method=["GET"]))

        m.connect("admin_permissions_ips", "/permissions/ips",
                  action="permission_ips", conditions=dict(method=["POST"]))
        m.connect("admin_permissions_ips", "/permissions/ips",
                  action="permission_ips", conditions=dict(method=["GET"]))

        m.connect("admin_permissions_perms", "/permissions/perms",
                  action="permission_perms", conditions=dict(method=["POST"]))
        m.connect("admin_permissions_perms", "/permissions/perms",
                  action="permission_perms", conditions=dict(method=["GET"]))


    #ADMIN DEFAULTS REST ROUTES
    rmap.resource('default', 'defaults',
                  controller='admin/defaults', path_prefix=ADMIN_PREFIX)

    #ADMIN AUTH SETTINGS
    rmap.connect('auth_settings', '%s/auth' % ADMIN_PREFIX,
                 controller='admin/auth_settings', action='auth_settings',
                 conditions=dict(method=["POST"]))
    rmap.connect('auth_home', '%s/auth' % ADMIN_PREFIX,
                 controller='admin/auth_settings')

    #ADMIN SETTINGS ROUTES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/settings') as m:
        m.connect("admin_settings", "/settings",
                  action="settings_vcs", conditions=dict(method=["POST"]))
        m.connect("admin_settings", "/settings",
                  action="settings_vcs", conditions=dict(method=["GET"]))

        m.connect("admin_settings_mapping", "/settings/mapping",
                  action="settings_mapping", conditions=dict(method=["POST"]))
        m.connect("admin_settings_mapping", "/settings/mapping",
                  action="settings_mapping", conditions=dict(method=["GET"]))

        m.connect("admin_settings_global", "/settings/global",
                  action="settings_global", conditions=dict(method=["POST"]))
        m.connect("admin_settings_global", "/settings/global",
                  action="settings_global", conditions=dict(method=["GET"]))

        m.connect("admin_settings_visual", "/settings/visual",
                  action="settings_visual", conditions=dict(method=["POST"]))
        m.connect("admin_settings_visual", "/settings/visual",
                  action="settings_visual", conditions=dict(method=["GET"]))

        m.connect("admin_settings_email", "/settings/email",
                  action="settings_email", conditions=dict(method=["POST"]))
        m.connect("admin_settings_email", "/settings/email",
                  action="settings_email", conditions=dict(method=["GET"]))

        m.connect("admin_settings_hooks", "/settings/hooks",
                  action="settings_hooks", conditions=dict(method=["POST"]))
        m.connect("admin_settings_hooks", "/settings/hooks",
                  action="settings_hooks", conditions=dict(method=["DELETE"]))
        m.connect("admin_settings_hooks", "/settings/hooks",
                  action="settings_hooks", conditions=dict(method=["GET"]))

        m.connect("admin_settings_search", "/settings/search",
                  action="settings_search", conditions=dict(method=["POST"]))
        m.connect("admin_settings_search", "/settings/search",
                  action="settings_search", conditions=dict(method=["GET"]))

        m.connect("admin_settings_system", "/settings/system",
                  action="settings_system", conditions=dict(method=["POST"]))
        m.connect("admin_settings_system", "/settings/system",
                  action="settings_system", conditions=dict(method=["GET"]))
        m.connect("admin_settings_system_update", "/settings/system/updates",
                  action="settings_system_update", conditions=dict(method=["GET"]))

    #ADMIN MY ACCOUNT
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/my_account') as m:

        m.connect("my_account", "/my_account",
                  action="my_account", conditions=dict(method=["GET"]))
        m.connect("my_account", "/my_account",
                  action="my_account", conditions=dict(method=["POST"]))

        m.connect("my_account_password", "/my_account/password",
                  action="my_account_password", conditions=dict(method=["GET"]))
        m.connect("my_account_password", "/my_account/password",
                  action="my_account_password", conditions=dict(method=["POST"]))

        m.connect("my_account_repos", "/my_account/repos",
                  action="my_account_repos", conditions=dict(method=["GET"]))

        m.connect("my_account_watched", "/my_account/watched",
                  action="my_account_watched", conditions=dict(method=["GET"]))

        m.connect("my_account_perms", "/my_account/perms",
                  action="my_account_perms", conditions=dict(method=["GET"]))

        m.connect("my_account_emails", "/my_account/emails",
                  action="my_account_emails", conditions=dict(method=["GET"]))
        m.connect("my_account_emails", "/my_account/emails",
                  action="my_account_emails_add", conditions=dict(method=["POST"]))
        m.connect("my_account_emails", "/my_account/emails",
                  action="my_account_emails_delete", conditions=dict(method=["DELETE"]))

        m.connect("my_account_api_keys", "/my_account/api_keys",
                  action="my_account_api_keys", conditions=dict(method=["GET"]))
        m.connect("my_account_api_keys", "/my_account/api_keys",
                  action="my_account_api_keys_add", conditions=dict(method=["POST"]))
        m.connect("my_account_api_keys", "/my_account/api_keys",
                  action="my_account_api_keys_delete", conditions=dict(method=["DELETE"]))

    #NOTIFICATION REST ROUTES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/notifications') as m:
        m.connect("notifications", "/notifications",
                  action="create", conditions=dict(method=["POST"]))
        m.connect("notifications", "/notifications",
                  action="index", conditions=dict(method=["GET"]))
        m.connect("notifications_mark_all_read", "/notifications/mark_all_read",
                  action="mark_all_read", conditions=dict(method=["GET"]))
        m.connect("formatted_notifications", "/notifications.{format}",
                  action="index", conditions=dict(method=["GET"]))
        m.connect("new_notification", "/notifications/new",
                  action="new", conditions=dict(method=["GET"]))
        m.connect("formatted_new_notification", "/notifications/new.{format}",
                  action="new", conditions=dict(method=["GET"]))
        m.connect("/notifications/{notification_id}",
                  action="update", conditions=dict(method=["PUT"]))
        m.connect("/notifications/{notification_id}",
                  action="delete", conditions=dict(method=["DELETE"]))
        m.connect("edit_notification", "/notifications/{notification_id}/edit",
                  action="edit", conditions=dict(method=["GET"]))
        m.connect("formatted_edit_notification",
                  "/notifications/{notification_id}.{format}/edit",
                  action="edit", conditions=dict(method=["GET"]))
        m.connect("notification", "/notifications/{notification_id}",
                  action="show", conditions=dict(method=["GET"]))
        m.connect("formatted_notification", "/notifications/{notification_id}.{format}",
                  action="show", conditions=dict(method=["GET"]))

    #ADMIN GIST
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/gists') as m:
        m.connect("gists", "/gists",
                  action="create", conditions=dict(method=["POST"]))
        m.connect("gists", "/gists",
                  action="index", conditions=dict(method=["GET"]))
        m.connect("new_gist", "/gists/new",
                  action="new", conditions=dict(method=["GET"]))


        m.connect("/gists/{gist_id}",
                  action="update", conditions=dict(method=["PUT"]))
        m.connect("/gists/{gist_id}",
                  action="delete", conditions=dict(method=["DELETE"]))
        m.connect("edit_gist", "/gists/{gist_id}/edit",
                  action="edit", conditions=dict(method=["GET", "POST"]))
        m.connect("edit_gist_check_revision", "/gists/{gist_id}/edit/check_revision",
                  action="check_revision", conditions=dict(method=["POST"]))


        m.connect("gist", "/gists/{gist_id}",
                  action="show", conditions=dict(method=["GET"]))
        m.connect("gist_rev", "/gists/{gist_id}/{revision}",
                  revision="tip",
                  action="show", conditions=dict(method=["GET"]))
        m.connect("formatted_gist", "/gists/{gist_id}/{revision}/{format}",
                  revision="tip",
                  action="show", conditions=dict(method=["GET"]))
        m.connect("formatted_gist_file", "/gists/{gist_id}/{revision}/{format}/{f_path:.*}",
                  revision='tip',
                  action="show", conditions=dict(method=["GET"]))

    #ADMIN MAIN PAGES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/admin') as m:
        m.connect('admin_home', '', action='index')
        m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}',
                  action='add_repo')
    #==========================================================================
    # API V2
    #==========================================================================
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='api/api') as m:
        m.connect('api', '/api')

    #USER JOURNAL
    rmap.connect('journal', '%s/journal' % ADMIN_PREFIX,
                 controller='journal', action='index')
    rmap.connect('journal_rss', '%s/journal/rss' % ADMIN_PREFIX,
                 controller='journal', action='journal_rss')
    rmap.connect('journal_atom', '%s/journal/atom' % ADMIN_PREFIX,
                 controller='journal', action='journal_atom')

    rmap.connect('public_journal', '%s/public_journal' % ADMIN_PREFIX,
                 controller='journal', action="public_journal")

    rmap.connect('public_journal_rss', '%s/public_journal/rss' % ADMIN_PREFIX,
                 controller='journal', action="public_journal_rss")

    rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % ADMIN_PREFIX,
                 controller='journal', action="public_journal_rss")

    rmap.connect('public_journal_atom',
                 '%s/public_journal/atom' % ADMIN_PREFIX, controller='journal',
                 action="public_journal_atom")

    rmap.connect('public_journal_atom_old',
                 '%s/public_journal_atom' % ADMIN_PREFIX, controller='journal',
                 action="public_journal_atom")

    rmap.connect('toggle_following', '%s/toggle_following' % ADMIN_PREFIX,
                 controller='journal', action='toggle_following',
                 conditions=dict(method=["POST"]))

    #SEARCH
    rmap.connect('search', '%s/search' % ADMIN_PREFIX, controller='search',)
    rmap.connect('search_repo_admin', '%s/search/{repo_name:.*}' % ADMIN_PREFIX,
                 controller='search',
                 conditions=dict(function=check_repo))
    rmap.connect('search_repo', '/{repo_name:.*?}/search',
                 controller='search',
                 conditions=dict(function=check_repo),
                 )

    #LOGIN/LOGOUT/REGISTER/SIGN IN
    rmap.connect('authentication_token', '%s/authentication_token' % ADMIN_PREFIX, controller='login', action='authentication_token')
    rmap.connect('login_home', '%s/login' % ADMIN_PREFIX, controller='login')
    rmap.connect('logout_home', '%s/logout' % ADMIN_PREFIX, controller='login',
                 action='logout')

    rmap.connect('register', '%s/register' % ADMIN_PREFIX, controller='login',
                 action='register')

    rmap.connect('reset_password', '%s/password_reset' % ADMIN_PREFIX,
                 controller='login', action='password_reset')

    rmap.connect('reset_password_confirmation',
                 '%s/password_reset_confirmation' % ADMIN_PREFIX,
                 controller='login', action='password_reset_confirmation')

    #FEEDS
    rmap.connect('rss_feed_home', '/{repo_name:.*?}/feed/rss',
                controller='feed', action='rss',
                conditions=dict(function=check_repo))

    rmap.connect('atom_feed_home', '/{repo_name:.*?}/feed/atom',
                controller='feed', action='atom',
                conditions=dict(function=check_repo))

    #==========================================================================
    # REPOSITORY ROUTES
    #==========================================================================
    rmap.connect('repo_creating_home', '/{repo_name:.*?}/repo_creating',
                controller='admin/repos', action='repo_creating')
    rmap.connect('repo_check_home', '/{repo_name:.*?}/crepo_check',
                controller='admin/repos', action='repo_check')

    rmap.connect('summary_home', '/{repo_name:.*?}',
                controller='summary',
                conditions=dict(function=check_repo))

    # must be here for proper group/repo catching
    rmap.connect('repos_group_home', '/{group_name:.*}',
                controller='admin/repo_groups', action="show_by_name",
                conditions=dict(function=check_group))
    rmap.connect('repo_stats_home', '/{repo_name:.*?}/statistics',
                controller='summary', action='statistics',
                conditions=dict(function=check_repo))

    rmap.connect('repo_size', '/{repo_name:.*?}/repo_size',
                controller='summary', action='repo_size',
                conditions=dict(function=check_repo))

    rmap.connect('branch_tag_switcher', '/{repo_name:.*?}/branches-tags',
                 controller='home', action='branch_tag_switcher')
    rmap.connect('repo_refs_data', '/{repo_name:.*?}/refs-data',
                 controller='home', action='repo_refs_data')

    rmap.connect('changeset_home', '/{repo_name:.*?}/changeset/{revision:.*}',
                controller='changeset', revision='tip',
                conditions=dict(function=check_repo))
    rmap.connect('changeset_children', '/{repo_name:.*?}/changeset_children/{revision}',
                controller='changeset', revision='tip', action="changeset_children",
                conditions=dict(function=check_repo))
    rmap.connect('changeset_parents', '/{repo_name:.*?}/changeset_parents/{revision}',
                controller='changeset', revision='tip', action="changeset_parents",
                conditions=dict(function=check_repo))

    # repo edit options
    rmap.connect("edit_repo", "/{repo_name:.*?}/settings",
                 controller='admin/repos', action="edit",
                 conditions=dict(method=["GET"], function=check_repo))

    rmap.connect("edit_repo_perms", "/{repo_name:.*?}/settings/permissions",
                 controller='admin/repos', action="edit_permissions",
                 conditions=dict(method=["GET"], function=check_repo))
    rmap.connect("edit_repo_perms_update", "/{repo_name:.*?}/settings/permissions",
                 controller='admin/repos', action="edit_permissions_update",
                 conditions=dict(method=["PUT"], function=check_repo))
    rmap.connect("edit_repo_perms_revoke", "/{repo_name:.*?}/settings/permissions",
                 controller='admin/repos', action="edit_permissions_revoke",
                 conditions=dict(method=["DELETE"], function=check_repo))

    rmap.connect("edit_repo_fields", "/{repo_name:.*?}/settings/fields",
                 controller='admin/repos', action="edit_fields",
                 conditions=dict(method=["GET"], function=check_repo))
    rmap.connect('create_repo_fields', "/{repo_name:.*?}/settings/fields/new",
                 controller='admin/repos', action="create_repo_field",
                 conditions=dict(method=["PUT"], function=check_repo))
    rmap.connect('delete_repo_fields', "/{repo_name:.*?}/settings/fields/{field_id}",
                 controller='admin/repos', action="delete_repo_field",
                 conditions=dict(method=["DELETE"], function=check_repo))


    rmap.connect("edit_repo_advanced", "/{repo_name:.*?}/settings/advanced",
                 controller='admin/repos', action="edit_advanced",
                 conditions=dict(method=["GET"], function=check_repo))

    rmap.connect("edit_repo_advanced_locking", "/{repo_name:.*?}/settings/advanced/locking",
                 controller='admin/repos', action="edit_advanced_locking",
                 conditions=dict(method=["PUT"], function=check_repo))
    rmap.connect('toggle_locking', "/{repo_name:.*?}/settings/advanced/locking_toggle",
                 controller='admin/repos', action="toggle_locking",
                 conditions=dict(method=["GET"], function=check_repo))

    rmap.connect("edit_repo_advanced_journal", "/{repo_name:.*?}/settings/advanced/journal",
                 controller='admin/repos', action="edit_advanced_journal",
                 conditions=dict(method=["PUT"], function=check_repo))

    rmap.connect("edit_repo_advanced_fork", "/{repo_name:.*?}/settings/advanced/fork",
                 controller='admin/repos', action="edit_advanced_fork",
                 conditions=dict(method=["PUT"], function=check_repo))


    rmap.connect("edit_repo_caches", "/{repo_name:.*?}/settings/caches",
                 controller='admin/repos', action="edit_caches",
                 conditions=dict(method=["GET"], function=check_repo))
    rmap.connect("edit_repo_caches", "/{repo_name:.*?}/settings/caches",
                 controller='admin/repos', action="edit_caches",
                 conditions=dict(method=["PUT"], function=check_repo))


    rmap.connect("edit_repo_remote", "/{repo_name:.*?}/settings/remote",
                 controller='admin/repos', action="edit_remote",
                 conditions=dict(method=["GET"], function=check_repo))
    rmap.connect("edit_repo_remote", "/{repo_name:.*?}/settings/remote",
                 controller='admin/repos', action="edit_remote",
                 conditions=dict(method=["PUT"], function=check_repo))

    rmap.connect("edit_repo_statistics", "/{repo_name:.*?}/settings/statistics",
                 controller='admin/repos', action="edit_statistics",
                 conditions=dict(method=["GET"], function=check_repo))
    rmap.connect("edit_repo_statistics", "/{repo_name:.*?}/settings/statistics",
                 controller='admin/repos', action="edit_statistics",
                 conditions=dict(method=["PUT"], function=check_repo))

    #still working url for backward compat.
    rmap.connect('raw_changeset_home_depraced',
                 '/{repo_name:.*?}/raw-changeset/{revision}',
                 controller='changeset', action='changeset_raw',
                 revision='tip', conditions=dict(function=check_repo))

    ## new URLs
    rmap.connect('changeset_raw_home',
                 '/{repo_name:.*?}/changeset-diff/{revision}',
                 controller='changeset', action='changeset_raw',
                 revision='tip', conditions=dict(function=check_repo))

    rmap.connect('changeset_patch_home',
                 '/{repo_name:.*?}/changeset-patch/{revision}',
                 controller='changeset', action='changeset_patch',
                 revision='tip', conditions=dict(function=check_repo))

    rmap.connect('changeset_download_home',
                 '/{repo_name:.*?}/changeset-download/{revision}',
                 controller='changeset', action='changeset_download',
                 revision='tip', conditions=dict(function=check_repo))

    rmap.connect('changeset_comment',
                 '/{repo_name:.*?}/changeset-comment/{revision}',
                controller='changeset', revision='tip', action='comment',
                conditions=dict(function=check_repo))

    rmap.connect('changeset_comment_preview',
                 '/{repo_name:.*?}/changeset-comment-preview',
                controller='changeset', action='preview_comment',
                conditions=dict(function=check_repo, method=["POST"]))

    rmap.connect('changeset_comment_delete',
                 '/{repo_name:.*?}/changeset-comment-delete/{comment_id}',
                controller='changeset', action='delete_comment',
                conditions=dict(function=check_repo, method=["DELETE"]))

    rmap.connect('changeset_info', '/changeset_info/{repo_name:.*?}/{revision}',
                 controller='changeset', action='changeset_info')

    rmap.connect('compare_home',
                 '/{repo_name:.*?}/compare',
                 controller='compare', action='index',
                 conditions=dict(function=check_repo))

    rmap.connect('compare_url',
                 '/{repo_name:.*?}/compare/{org_ref_type}@{org_ref_name:.*?}...{other_ref_type}@{other_ref_name:.*?}',
                 controller='compare', action='compare',
                 conditions=dict(function=check_repo),
                 requirements=dict(
                            org_ref_type='(branch|book|tag|rev|__other_ref_type__)',
                            other_ref_type='(branch|book|tag|rev|__org_ref_type__)')
                 )

    rmap.connect('pullrequest_home',
                 '/{repo_name:.*?}/pull-request/new', controller='pullrequests',
                 action='index', conditions=dict(function=check_repo,
                                                 method=["GET"]))

    rmap.connect('pullrequest_repo_info',
                 '/{repo_name:.*?}/pull-request-repo-info',
                 controller='pullrequests', action='repo_info',
                 conditions=dict(function=check_repo, method=["GET"]))

    rmap.connect('pullrequest',
                 '/{repo_name:.*?}/pull-request/new', controller='pullrequests',
                 action='create', conditions=dict(function=check_repo,
                                                  method=["POST"]))

    rmap.connect('pullrequest_show',
                 '/{repo_name:.*?}/pull-request/{pull_request_id:\\d+}{extra:(/.*)?}', extra='',
                 controller='pullrequests',
                 action='show', conditions=dict(function=check_repo,
                                                method=["GET"]))
    rmap.connect('pullrequest_post',
                 '/{repo_name:.*?}/pull-request/{pull_request_id}',
                 controller='pullrequests',
                 action='post', conditions=dict(function=check_repo,
                                                method=["POST"]))
    rmap.connect('pullrequest_delete',
                 '/{repo_name:.*?}/pull-request/{pull_request_id}',
                 controller='pullrequests',
                 action='delete', conditions=dict(function=check_repo,
                                                method=["DELETE"]))

    rmap.connect('pullrequest_show_all',
                 '/{repo_name:.*?}/pull-request',
                 controller='pullrequests',
                 action='show_all', conditions=dict(function=check_repo,
                                                method=["GET"]))

    rmap.connect('my_pullrequests',
                 '/my_pullrequests',
                 controller='pullrequests',
                 action='show_my', conditions=dict(method=["GET"]))

    rmap.connect('pullrequest_comment',
                 '/{repo_name:.*?}/pull-request-comment/{pull_request_id}',
                 controller='pullrequests',
                 action='comment', conditions=dict(function=check_repo,
                                                method=["POST"]))

    rmap.connect('pullrequest_comment_delete',
                 '/{repo_name:.*?}/pull-request-comment/{comment_id}/delete',
                controller='pullrequests', action='delete_comment',
                conditions=dict(function=check_repo, method=["DELETE"]))

    rmap.connect('summary_home_summary', '/{repo_name:.*?}/summary',
                controller='summary', conditions=dict(function=check_repo))

    rmap.connect('branches_home', '/{repo_name:.*?}/branches',
                controller='branches', conditions=dict(function=check_repo))

    rmap.connect('tags_home', '/{repo_name:.*?}/tags',
                controller='tags', conditions=dict(function=check_repo))

    rmap.connect('bookmarks_home', '/{repo_name:.*?}/bookmarks',
                controller='bookmarks', conditions=dict(function=check_repo))

    rmap.connect('changelog_home', '/{repo_name:.*?}/changelog',
                controller='changelog', conditions=dict(function=check_repo))

    rmap.connect('changelog_summary_home', '/{repo_name:.*?}/changelog_summary',
                controller='changelog', action='changelog_summary',
                conditions=dict(function=check_repo))

    rmap.connect('changelog_file_home', '/{repo_name:.*?}/changelog/{revision}/{f_path:.*}',
                controller='changelog', f_path=None,
                conditions=dict(function=check_repo))

    rmap.connect('changelog_details', '/{repo_name:.*?}/changelog_details/{cs}',
                controller='changelog', action='changelog_details',
                conditions=dict(function=check_repo))

    rmap.connect('files_home', '/{repo_name:.*?}/files/{revision}/{f_path:.*}',
                controller='files', revision='tip', f_path='',
                conditions=dict(function=check_repo))

    rmap.connect('files_home_nopath', '/{repo_name:.*?}/files/{revision}',
                controller='files', revision='tip', f_path='',
                conditions=dict(function=check_repo))

    rmap.connect('files_history_home',
                 '/{repo_name:.*?}/history/{revision}/{f_path:.*}',
                 controller='files', action='history', revision='tip', f_path='',
                 conditions=dict(function=check_repo))

    rmap.connect('files_authors_home',
                 '/{repo_name:.*?}/authors/{revision}/{f_path:.*}',
                 controller='files', action='authors', revision='tip', f_path='',
                 conditions=dict(function=check_repo))

    rmap.connect('files_diff_home', '/{repo_name:.*?}/diff/{f_path:.*}',
                controller='files', action='diff', revision='tip', f_path='',
                conditions=dict(function=check_repo))

    rmap.connect('files_diff_2way_home', '/{repo_name:.*?}/diff-2way/{f_path:.+}',
                controller='files', action='diff_2way', revision='tip', f_path='',
                conditions=dict(function=check_repo))

    rmap.connect('files_rawfile_home',
                 '/{repo_name:.*?}/rawfile/{revision}/{f_path:.*}',
                 controller='files', action='rawfile', revision='tip',
                 f_path='', conditions=dict(function=check_repo))

    rmap.connect('files_raw_home',
                 '/{repo_name:.*?}/raw/{revision}/{f_path:.*}',
                 controller='files', action='raw', revision='tip', f_path='',
                 conditions=dict(function=check_repo))

    rmap.connect('files_annotate_home',
                 '/{repo_name:.*?}/annotate/{revision}/{f_path:.*}',
                 controller='files', action='index', revision='tip',
                 f_path='', annotate=True, conditions=dict(function=check_repo))

    rmap.connect('files_edit_home',
                 '/{repo_name:.*?}/edit/{revision}/{f_path:.*}',
                 controller='files', action='edit', revision='tip',
                 f_path='', conditions=dict(function=check_repo))

    rmap.connect('files_add_home',
                 '/{repo_name:.*?}/add/{revision}/{f_path:.*}',
                 controller='files', action='add', revision='tip',
                 f_path='', conditions=dict(function=check_repo))

    rmap.connect('files_delete_home',
                 '/{repo_name:.*?}/delete/{revision}/{f_path:.*}',
                 controller='files', action='delete', revision='tip',
                 f_path='', conditions=dict(function=check_repo))

    rmap.connect('files_archive_home', '/{repo_name:.*?}/archive/{fname}',
                controller='files', action='archivefile',
 
                conditions=dict(function=check_repo))
 

	
 
    rmap.connect('files_nodelist_home',
 
                 '/{repo_name:.*?}/nodelist/{revision}/{f_path:.*}',
 
                controller='files', action='nodelist',
 
                conditions=dict(function=check_repo))
 

	
 
    rmap.connect('repo_fork_create_home', '/{repo_name:.*?}/fork',
 
                controller='forks', action='fork_create',
 
                conditions=dict(function=check_repo, method=["POST"]))
 

	
 
    rmap.connect('repo_fork_home', '/{repo_name:.*?}/fork',
 
                controller='forks', action='fork',
 
                conditions=dict(function=check_repo))
 

	
 
    rmap.connect('repo_forks_home', '/{repo_name:.*?}/forks',
 
                 controller='forks', action='forks',
 
                 conditions=dict(function=check_repo))
 

	
 
    rmap.connect('repo_followers_home', '/{repo_name:.*?}/followers',
 
                 controller='followers', action='followers',
 
                 conditions=dict(function=check_repo))
 

	
 
    return rmap
kallithea/controllers/files.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.files
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Files controller for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 21, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 
import os
 
import logging
 
import traceback
 
import tempfile
 
import shutil
 

	
 
from pylons import request, response, tmpl_context as c, url
 
from pylons.i18n.translation import _
 
from pylons.controllers.util import redirect
 
from kallithea.lib.utils import jsonify, action_logger
 

	
 
from kallithea.lib import diffs
 
from kallithea.lib import helpers as h
 

	
 
from kallithea.lib.compat import OrderedDict
 
from kallithea.lib.utils2 import convert_line_endings, detect_mode, safe_str,\
 
    str2bool
 
from kallithea.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
 
from kallithea.lib.base import BaseRepoController, render
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 
from kallithea.lib.vcs.conf import settings
 
from kallithea.lib.vcs.exceptions import RepositoryError, \
 
    ChangesetDoesNotExistError, EmptyRepositoryError, \
 
    ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,\
 
    NodeDoesNotExistError, ChangesetError, NodeError
 
from kallithea.lib.vcs.nodes import FileNode
 

	
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.scm import ScmModel
 
from kallithea.model.db import Repository
 

	
 
from kallithea.controllers.changeset import anchor_url, _ignorews_url,\
 
    _context_url, get_line_ctx, get_ignore_ws
 
from webob.exc import HTTPNotFound
 
from kallithea.lib.exceptions import NonRelativePathError
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class FilesController(BaseRepoController):
 

	
 
    def __before__(self):
 
        super(FilesController, self).__before__()
 
        c.cut_off_limit = self.cut_off_limit
 

	
 
    def __get_cs(self, rev, silent_empty=False):
 
        """
 
        Safe way to get a changeset: if an error occurs, a proper message
        is flashed and HTTPNotFound is raised.
 

	
 
        :param rev: revision to fetch
 
        :param silent_empty: return None if the repository is empty
 
        """
 

	
 
        try:
 
            return c.db_repo_scm_instance.get_changeset(rev)
 
        except EmptyRepositoryError as e:
 
            if silent_empty:
 
                return None
 
            url_ = url('files_add_home',
 
                       repo_name=c.repo_name,
 
                       revision=0, f_path='', anchor='edit')
 
            add_new = h.link_to(_('Click here to add new file'), url_, class_="alert-link")
 
            h.flash(h.literal(_('There are no files yet. %s') % add_new),
 
                    category='warning')
 
            raise HTTPNotFound()
 
        except (ChangesetDoesNotExistError, LookupError) as e:
 
            msg = _('Such revision does not exist for this repository')
 
            h.flash(msg, category='error')
 
            raise HTTPNotFound()
 
        except RepositoryError as e:
 
            h.flash(safe_str(e), category='error')
 
            raise HTTPNotFound()
 

	
 
    def __get_filenode(self, cs, path):
 
        """
 
        Returns file_node or raise HTTP error.
 

	
 
        :param cs: given changeset
 
        :param path: path to lookup
 
        """
 

	
 
        try:
 
            file_node = cs.get_node(path)
 
            if file_node.is_dir():
 
                raise RepositoryError('given path is a directory')
 
        except ChangesetDoesNotExistError as e:
 
            msg = _('Such revision does not exist for this repository')
 
            h.flash(msg, category='error')
 
            raise HTTPNotFound()
 
        except RepositoryError as e:
 
            h.flash(safe_str(e), category='error')
 
            raise HTTPNotFound()
 

	
 
        return file_node
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def index(self, repo_name, revision, f_path, annotate=False):
 
        # redirect to given revision from form if given
 
        post_revision = request.POST.get('at_rev', None)
 
        if post_revision:
 
            cs = self.__get_cs(post_revision) # FIXME - unused!
 

	
 
        c.revision = revision
 
        c.changeset = self.__get_cs(revision)
 
        c.branch = request.GET.get('branch', None)
 
        c.f_path = f_path
 
        c.annotate = annotate
 
        cur_rev = c.changeset.revision
 

	
 
        # prev link
 
        try:
 
            prev_rev = c.db_repo_scm_instance.get_changeset(cur_rev).prev(c.branch)
 
            c.url_prev = url('files_home', repo_name=c.repo_name,
 
                         revision=prev_rev.raw_id, f_path=f_path)
 
            if c.branch:
 
                c.url_prev += '?branch=%s' % c.branch
 
        except (ChangesetDoesNotExistError, VCSError):
 
            c.url_prev = '#'
 

	
 
        # next link
 
        try:
 
            next_rev = c.db_repo_scm_instance.get_changeset(cur_rev).next(c.branch)
 
            c.url_next = url('files_home', repo_name=c.repo_name,
 
                     revision=next_rev.raw_id, f_path=f_path)
 
            if c.branch:
 
                c.url_next += '?branch=%s' % c.branch
 
        except (ChangesetDoesNotExistError, VCSError):
 
            c.url_next = '#'
 

	
 
        # files or dirs
 
        try:
 
            c.file = c.changeset.get_node(f_path)
 

	
 
            if c.file.is_file():
 
                c.load_full_history = False
 
                file_last_cs = c.file.last_changeset
 
                c.file_changeset = (c.changeset
 
                                    if c.changeset.revision < file_last_cs.revision
 
                                    else file_last_cs)
 
                #determine if we're on branch head
 
                _branches = c.db_repo_scm_instance.branches
 
                c.on_branch_head = revision in _branches.keys() + _branches.values()
 
                _hist = []
 
                c.file_history = []
 
                if c.load_full_history:
 
                    c.file_history, _hist = self._get_node_history(c.changeset, f_path)
 

	
 
                c.authors = []
 
                for a in set([x.author for x in _hist]):
 
                    c.authors.append((h.email(a), h.person(a)))
 
            else:
 
                c.authors = c.file_history = []
 
        except RepositoryError as e:
 
            h.flash(safe_str(e), category='error')
 
            raise HTTPNotFound()
 

	
 
        if request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            return render('files/files_ypjax.html')
 

	
 
        # TODO: tags and bookmarks?
 
        c.revision_options = [(c.changeset.raw_id,
 
                              _('%s at %s') % (c.changeset.branch, h.short_id(c.changeset.raw_id)))] + \
 
            [(n, b) for b, n in c.db_repo_scm_instance.branches.items()]
 
        if c.db_repo_scm_instance.closed_branches:
 
            prefix = _('(closed)') + ' '
 
            c.revision_options += [('-', '-')] + \
 
                [(n, prefix + b) for b, n in c.db_repo_scm_instance.closed_branches.items()]
 

	
 
        return render('files/files.html')
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def history(self, repo_name, revision, f_path):
 
        changeset = self.__get_cs(revision)
 
        _file = changeset.get_node(f_path)
 
        if _file.is_file():
 
            file_history, _hist = self._get_node_history(changeset, f_path)
 

	
 
            res = []
 
            for obj in file_history:
 
                res.append({
 
                    'text': obj[1],
 
                    'children': [{'id': o[0], 'text': o[1]} for o in obj[0]]
 
                })
 

	
 
            data = {
 
                'more': False,
 
                'results': res
 
            }
 
            return data
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def authors(self, repo_name, revision, f_path):
 
        changeset = self.__get_cs(revision)
 
        _file = changeset.get_node(f_path)
 
        if _file.is_file():
 
            file_history, _hist = self._get_node_history(changeset, f_path)
 
            c.authors = []
 
            for a in set([x.author for x in _hist]):
 
                c.authors.append((h.email(a), h.person(a)))
 
            return render('files/files_history_box.html')
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def rawfile(self, repo_name, revision, f_path):
 
        cs = self.__get_cs(revision)
 
        file_node = self.__get_filenode(cs, f_path)
 

	
 
        response.content_disposition = 'attachment; filename=%s' % \
 
            safe_str(f_path.split(Repository.url_sep())[-1])
 

	
 
        response.content_type = file_node.mimetype
 
        return file_node.content
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def raw(self, repo_name, revision, f_path):
 
        cs = self.__get_cs(revision)
 
        file_node = self.__get_filenode(cs, f_path)
 

	
 
        raw_mimetype_mapping = {
 
            # map original mimetype to a mimetype used for "show as raw"
 
            # you can also provide a content-disposition to override the
 
            # default "attachment" disposition.
 
            # orig_type: (new_type, new_dispo)
 

	
 
            # show images inline:
 
            'image/x-icon': ('image/x-icon', 'inline'),
 
            'image/png': ('image/png', 'inline'),
 
            'image/gif': ('image/gif', 'inline'),
 
            'image/jpeg': ('image/jpeg', 'inline'),
 
            'image/svg+xml': ('image/svg+xml', 'inline'),
 
        }
 

	
 
        mimetype = file_node.mimetype
 
        try:
 
            mimetype, dispo = raw_mimetype_mapping[mimetype]
 
        except KeyError:
 
            # we don't know anything special about this, handle it safely
 
            if file_node.is_binary:
 
                # do same as download raw for binary files
 
                mimetype, dispo = 'application/octet-stream', 'attachment'
 
            else:
 
                # do not just use the original mimetype, but force text/plain,
 
                # otherwise it would serve text/html and that might be unsafe.
 
                # Note: underlying vcs library fakes text/plain mimetype if the
 
                # mimetype cannot be determined and it thinks it is not
                # binary. This might lead to erroneous text display in some
 
                # cases, but helps in other cases, like with text files
 
                # without extension.
 
                mimetype, dispo = 'text/plain', 'inline'
 

	
 
        if dispo == 'attachment':
 
            dispo = 'attachment; filename=%s' % \
 
                        safe_str(f_path.split(os.sep)[-1])
 

	
 
        response.content_disposition = dispo
 
        response.content_type = mimetype
 
        return file_node.content
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
 
    def delete(self, repo_name, revision, f_path):
 
        repo = c.db_repo
 
        if repo.enable_locking and repo.locked[0]:
 
            h.flash(_('This repository has been locked by %s on %s')
 
                % (h.person_by_id(repo.locked[0]),
 
                   h.fmt_date(h.time_to_datetime(repo.locked[1]))),
 
                'warning')
 
            return redirect(h.url('files_home',
 
                                  repo_name=repo_name, revision='tip'))
 

	
 
        # check if revision is a branch identifier - basically we cannot
 
        # create multiple heads via file editing
 
        _branches = repo.scm_instance.branches
 
        # check if revision is a branch name or branch hash
 
        if revision not in _branches.keys() + _branches.values():
 
            h.flash(_('You can only delete files if the revision '
                      'is a valid branch'), category='warning')
 
            return redirect(h.url('files_home',
 
                                  repo_name=repo_name, revision='tip',
 
                                  f_path=f_path))
 

	
 
        r_post = request.POST
 

	
 
        c.cs = self.__get_cs(revision)
 
        c.file = self.__get_filenode(c.cs, f_path)
 

	
 
        c.default_message = _('Deleted file %s via Kallithea') % (f_path)
 
        c.f_path = f_path
 
        node_path = f_path
 
        author = self.authuser.full_contact
 

	
 
        if r_post:
 
            message = r_post.get('message') or c.default_message
 

	
 
            try:
 
                nodes = {
 
                    node_path: {
 
                        'content': ''
 
                    }
 
                }
 
                self.scm_model.delete_nodes(
 
                    user=c.authuser.user_id, repo=c.db_repo,
 
                    message=message,
 
                    nodes=nodes,
 
                    parent_cs=c.cs,
 
                    author=author,
 
                )
 

	
 
                h.flash(_('Successfully deleted file %s') % f_path,
 
                        category='success')
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                h.flash(_('Error occurred during commit'), category='error')
 
            return redirect(url('changeset_home',
 
                                repo_name=c.repo_name, revision='tip'))
 

	
 
        return render('files/files_delete.html')
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
 
    def edit(self, repo_name, revision, f_path):
 
        repo = c.db_repo
 
        if repo.enable_locking and repo.locked[0]:
 
            h.flash(_('This repository has been locked by %s on %s')
 
                % (h.person_by_id(repo.locked[0]),
 
                   h.fmt_date(h.time_to_datetime(repo.locked[1]))),
 
                'warning')
 
            return redirect(h.url('files_home',
 
                                  repo_name=repo_name, revision='tip'))
 

	
 
        # check if revision is a branch identifier - basically we cannot
 
        # create multiple heads via file editing
 
        _branches = repo.scm_instance.branches
 
        # check if revision is a branch name or branch hash
 
        if revision not in _branches.keys() + _branches.values():
 
            h.flash(_('You can only edit files if the revision '
                      'is a valid branch'), category='warning')
 
            return redirect(h.url('files_home',
 
                                  repo_name=repo_name, revision='tip',
 
                                  f_path=f_path))
 

	
 
        r_post = request.POST
 

	
 
        c.cs = self.__get_cs(revision)
 
        c.file = self.__get_filenode(c.cs, f_path)
 

	
 
        if c.file.is_binary:
 
            return redirect(url('files_home', repo_name=c.repo_name,
 
                            revision=c.cs.raw_id, f_path=f_path))
 
        c.default_message = _('Edited file %s via Kallithea') % (f_path)
 
        c.f_path = f_path
 

	
 
        if r_post:
 

	
 
            old_content = c.file.content
 
            sl = old_content.splitlines(1)
 
            first_line = sl[0] if sl else ''
 
            # modes:  0 - Unix, 1 - Mac, 2 - DOS
 
            mode = detect_mode(first_line, 0)
 
            content = convert_line_endings(r_post.get('content', ''), mode)
 

	
 
            message = r_post.get('message') or c.default_message
 
            author = self.authuser.full_contact
 

	
 
            if content == old_content:
 
                h.flash(_('No changes'), category='warning')
 
                return redirect(url('changeset_home', repo_name=c.repo_name,
 
                                    revision='tip'))
 
            try:
 
                self.scm_model.commit_change(repo=c.db_repo_scm_instance,
 
                                             repo_name=repo_name, cs=c.cs,
 
                                             user=self.authuser.user_id,
 
                                             author=author, message=message,
 
                                             content=content, f_path=f_path)
 
                h.flash(_('Successfully committed to %s') % f_path,
 
                        category='success')
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                h.flash(_('Error occurred during commit'), category='error')
 
            return redirect(url('changeset_home',
 
                                repo_name=c.repo_name, revision='tip'))
 

	
 
        return render('files/files_edit.html')
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
 
    def add(self, repo_name, revision, f_path):
 

	
 
        repo = Repository.get_by_repo_name(repo_name)
 
        if repo.enable_locking and repo.locked[0]:
 
            h.flash(_('This repository has been locked by %s on %s')
 
                % (h.person_by_id(repo.locked[0]),
 
                   h.fmt_date(h.time_to_datetime(repo.locked[1]))),
 
                  'warning')
 
            return redirect(h.url('files_home',
 
                                  repo_name=repo_name, revision='tip'))
 

	
 
        r_post = request.POST
 
        c.cs = self.__get_cs(revision, silent_empty=True)
 
        if c.cs is None:
 
            c.cs = EmptyChangeset(alias=c.db_repo_scm_instance.alias)
 
        c.default_message = (_('Added file via Kallithea'))
 
        c.f_path = f_path
 

	
 
        if r_post:
 
            unix_mode = 0
 
            content = convert_line_endings(r_post.get('content', ''), unix_mode)
 

	
 
            message = r_post.get('message') or c.default_message
 
            filename = r_post.get('filename')
 
            location = r_post.get('location', '')
 
            file_obj = r_post.get('upload_file', None)
 

	
 
            if file_obj is not None and hasattr(file_obj, 'filename'):
 
                filename = file_obj.filename
 
                content = file_obj.file
 

	
 
                if hasattr(content, 'file'):
 
                    # non-POSIX systems store the real file under the file attr
 
                    content = content.file
 

	
 
            if not content:
 
                h.flash(_('No content'), category='warning')
 
                return redirect(url('changeset_home', repo_name=c.repo_name,
 
                                    revision='tip'))
 
            if not filename:
 
                h.flash(_('No filename'), category='warning')
 
                return redirect(url('changeset_home', repo_name=c.repo_name,
 
                                    revision='tip'))
 
            # strip all path components from the filename, just leave the basename
 
            filename = os.path.basename(filename)
 
            node_path = os.path.join(location, filename)
 
            author = self.authuser.full_contact
 

	
 
            try:
 
                nodes = {
 
                    node_path: {
 
                        'content': content
 
                    }
 
                }
 
                self.scm_model.create_nodes(
 
                    user=c.authuser.user_id, repo=c.db_repo,
 
                    message=message,
 
                    nodes=nodes,
 
                    parent_cs=c.cs,
 
                    author=author,
 
                )
 

	
 
                h.flash(_('Successfully committed to %s') % node_path,
 
                        category='success')
 
            except NonRelativePathError as e:
 
                h.flash(_('Location must be a relative path and must not '
                          'contain .. in the path'), category='warning')
 
                return redirect(url('changeset_home', repo_name=c.repo_name,
 
                                    revision='tip'))
 
            except (NodeError, NodeAlreadyExistsError) as e:
 
                h.flash(safe_str(e), category='error')
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                h.flash(_('Error occurred during commit'), category='error')
 
            return redirect(url('changeset_home',
 
                                repo_name=c.repo_name, revision='tip'))
 

	
 
        return render('files/files_add.html')
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def archivefile(self, repo_name, fname):
 
        fileformat = None
 
        revision = None
 
        ext = None
 
        subrepos = request.GET.get('subrepos') == 'true'
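
        # fname arrives as '<revision><archive extension>' (e.g. 'tip.tar.gz');
        # try each known extension from ARCHIVE_SPECS to split out the revision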
 

	
 
        for a_type, ext_data in settings.ARCHIVE_SPECS.items():
 
            archive_spec = fname.split(ext_data[1])
 
            if len(archive_spec) == 2 and archive_spec[1] == '':
 
                fileformat = a_type or ext_data[1]
 
                revision = archive_spec[0]
 
                ext = ext_data[1]
 

	
 
        try:
 
            dbrepo = RepoModel().get_by_repo_name(repo_name)
 
            if not dbrepo.enable_downloads:
 
                return _('Downloads disabled') # TODO: do something else?
 

	
 
            if c.db_repo_scm_instance.alias == 'hg':
 
                # patch and reset hooks section of UI config to not run any
 
                # hooks on fetching archives with subrepos
 
                for k, v in c.db_repo_scm_instance._repo.ui.configitems('hooks'):
 
                    c.db_repo_scm_instance._repo.ui.setconfig('hooks', k, None)
 

	
 
            cs = c.db_repo_scm_instance.get_changeset(revision)
 
            content_type = settings.ARCHIVE_SPECS[fileformat][0]
 
        except ChangesetDoesNotExistError:
 
            return _('Unknown revision %s') % revision
 
        except EmptyRepositoryError:
 
            return _('Empty repository')
 
        except (ImproperArchiveTypeError, KeyError):
 
            return _('Unknown archive type')
 

	
 
        from kallithea import CONFIG
 
        rev_name = cs.raw_id[:12]
 
        archive_name = '%s-%s%s' % (safe_str(repo_name.replace('/', '_')),
 
                                    safe_str(rev_name), ext)
 

	
 
        archive_path = None
 
        cached_archive_path = None
 
        archive_cache_dir = CONFIG.get('archive_cache_dir')
 
        if archive_cache_dir and not subrepos: # TODO: subrepo caching?
 
            if not os.path.isdir(archive_cache_dir):
 
                os.makedirs(archive_cache_dir)
 
            cached_archive_path = os.path.join(archive_cache_dir, archive_name)
 
            if os.path.isfile(cached_archive_path):
 
                log.debug('Found cached archive in %s', cached_archive_path)
 
                archive_path = cached_archive_path
 
            else:
 
                log.debug('Archive %s is not yet cached', archive_name)
 

	
 
        if archive_path is None:
 
            # generate new archive
 
            fd, archive_path = tempfile.mkstemp()
 
            log.debug('Creating new temp archive in %s', archive_path)
 
            with os.fdopen(fd, 'wb') as stream:
 
                cs.fill_archive(stream=stream, kind=fileformat, subrepos=subrepos)
 
                # stream (and thus fd) has been closed by cs.fill_archive
 
            if cached_archive_path is not None:
 
                # we generated the archive - move it to cache
 
                log.debug('Storing new archive in %s', cached_archive_path)
 
                shutil.move(archive_path, cached_archive_path)
 
                archive_path = cached_archive_path
 

	
 
        def get_chunked_archive(archive_path):
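            # stream the archive in 16 kB chunks; afterwards remove the file
            # again, unless it is the cached copy we want to keep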
 
            stream = open(archive_path, 'rb')
 
            while True:
 
                data = stream.read(16 * 1024)
 
                if not data:
 
                    break
 
                yield data
 
            stream.close()
 
            if archive_path != cached_archive_path:
 
                log.debug('Destroying temp archive %s', archive_path)
 
                os.remove(archive_path)
 

	
 
        action_logger(user=c.authuser,
 
                      action='user_downloaded_archive:%s' % (archive_name),
 
                      repo=repo_name, ipaddr=self.ip_addr, commit=True)
 

	
 
        response.content_disposition = str('attachment; filename=%s' % (archive_name))
 
        response.content_type = str(content_type)
 
        return get_chunked_archive(archive_path)
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def diff(self, repo_name, f_path):
 
        ignore_whitespace = request.GET.get('ignorews') == '1'
 
        line_context = request.GET.get('context', 3)
 
        diff2 = request.GET.get('diff2', '')
 
        diff1 = request.GET.get('diff1', '') or diff2
 
        c.action = request.GET.get('diff')
 
        c.no_changes = diff1 == diff2
 
        c.f_path = f_path
 
        c.big_diff = False
 
        c.anchor_url = anchor_url
 
        c.ignorews_url = _ignorews_url
 
        c.context_url = _context_url
 
        c.changes = OrderedDict()
 
        c.changes[diff2] = []
 

	
 
        # special case: if we only want to show a revision, it is handled
        # here to reduce JS and callbacks
 

	
 
        if request.GET.get('show_rev'):
 
            if str2bool(request.GET.get('annotate', 'False')):
 
                _url = url('files_annotate_home', repo_name=c.repo_name,
 
                           revision=diff1, f_path=c.f_path)
 
            else:
 
                _url = url('files_home', repo_name=c.repo_name,
 
                           revision=diff1, f_path=c.f_path)
 

	
 
            return redirect(_url)
 
        try:
 
            if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]:
 
                c.changeset_1 = c.db_repo_scm_instance.get_changeset(diff1)
 
                try:
 
                    node1 = c.changeset_1.get_node(f_path)
 
                    if node1.is_dir():
 
                        raise NodeError('%s path is a %s, not a file'
 
                                        % (node1, type(node1)))
 
                except NodeDoesNotExistError:
 
                    c.changeset_1 = EmptyChangeset(cs=diff1,
 
                                                   revision=c.changeset_1.revision,
 
                                                   repo=c.db_repo_scm_instance)
 
                    node1 = FileNode(f_path, '', changeset=c.changeset_1)
 
            else:
 
                c.changeset_1 = EmptyChangeset(repo=c.db_repo_scm_instance)
 
                node1 = FileNode(f_path, '', changeset=c.changeset_1)
 

	
 
            if diff2 not in ['', None, 'None', '0' * 12, '0' * 40]:
 
                c.changeset_2 = c.db_repo_scm_instance.get_changeset(diff2)
 
                try:
 
                    node2 = c.changeset_2.get_node(f_path)
 
                    if node2.is_dir():
 
                        raise NodeError('%s path is a %s, not a file'
 
                                        % (node2, type(node2)))
 
                except NodeDoesNotExistError:
 
                    c.changeset_2 = EmptyChangeset(cs=diff2,
 
                                                   revision=c.changeset_2.revision,
 
                                                   repo=c.db_repo_scm_instance)
 
                    node2 = FileNode(f_path, '', changeset=c.changeset_2)
 
            else:
 
                c.changeset_2 = EmptyChangeset(repo=c.db_repo_scm_instance)
 
                node2 = FileNode(f_path, '', changeset=c.changeset_2)
 
        except (RepositoryError, NodeError):
 
            log.error(traceback.format_exc())
 
            return redirect(url('files_home', repo_name=c.repo_name,
 
                                f_path=f_path))
 

	
 
        if c.action == 'download':
 
            _diff = diffs.get_gitdiff(node1, node2,
 
                                      ignore_whitespace=ignore_whitespace,
 
                                      context=line_context)
 
            diff = diffs.DiffProcessor(_diff, format='gitdiff')
 

	
 
            diff_name = '%s_vs_%s.diff' % (diff1, diff2)
 
            response.content_type = 'text/plain'
 
            response.content_disposition = (
 
                'attachment; filename=%s' % diff_name
 
            )
 
            return diff.as_raw()
 

	
 
        elif c.action == 'raw':
 
            _diff = diffs.get_gitdiff(node1, node2,
 
                                      ignore_whitespace=ignore_whitespace,
 
                                      context=line_context)
 
            diff = diffs.DiffProcessor(_diff, format='gitdiff')
 
            response.content_type = 'text/plain'
 
            return diff.as_raw()
 

	
 
        else:
 
            fid = h.FID(diff2, node2.path)
 
            line_context_lcl = get_line_ctx(fid, request.GET)
 
            ign_whitespace_lcl = get_ignore_ws(fid, request.GET)
 

	
 
            lim = request.GET.get('fulldiff') or self.cut_off_limit
 
            _, cs1, cs2, diff, st = diffs.wrapped_diff(filenode_old=node1,
 
                                         filenode_new=node2,
 
                                         cut_off_limit=lim,
 
                                         ignore_whitespace=ign_whitespace_lcl,
 
                                         line_context=line_context_lcl,
 
                                         enable_comments=False)
 
            op = ''
 
            filename = node1.path
 
            cs_changes = {
 
                'fid': [cs1, cs2, op, filename, diff, st]
 
            }
 
            c.changes = cs_changes
 

	
 
        return render('files/file_diff.html')
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def diff_2way(self, repo_name, f_path):
 
        diff1 = request.GET.get('diff1', '')
 
        diff2 = request.GET.get('diff2', '')
 
        try:
 
            if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]:
 
                c.changeset_1 = c.db_repo_scm_instance.get_changeset(diff1)
 
                try:
 
                    node1 = c.changeset_1.get_node(f_path)
 
                    if node1.is_dir():
 
                        raise NodeError('%s path is a %s, not a file'
 
                                        % (node1, type(node1)))
 
                except NodeDoesNotExistError:
 
                    c.changeset_1 = EmptyChangeset(cs=diff1,
 
                                                   revision=c.changeset_1.revision,
 
                                                   repo=c.db_repo_scm_instance)
 
                    node1 = FileNode(f_path, '', changeset=c.changeset_1)
 
            else:
 
                c.changeset_1 = EmptyChangeset(repo=c.db_repo_scm_instance)
 
                node1 = FileNode(f_path, '', changeset=c.changeset_1)
 

	
 
            if diff2 not in ['', None, 'None', '0' * 12, '0' * 40]:
 
                c.changeset_2 = c.db_repo_scm_instance.get_changeset(diff2)
 
                try:
 
                    node2 = c.changeset_2.get_node(f_path)
 
                    if node2.is_dir():
 
                        raise NodeError('%s path is a %s, not a file'
 
                                        % (node2, type(node2)))
 
                except NodeDoesNotExistError:
 
                    c.changeset_2 = EmptyChangeset(cs=diff2,
 
                                                   revision=c.changeset_2.revision,
 
                                                   repo=c.db_repo_scm_instance)
 
                    node2 = FileNode(f_path, '', changeset=c.changeset_2)
 
            else:
 
                c.changeset_2 = EmptyChangeset(repo=c.db_repo_scm_instance)
 
                node2 = FileNode(f_path, '', changeset=c.changeset_2)
 
        except ChangesetDoesNotExistError as e:
 
            msg = _('Such revision does not exist for this repository')
 
            h.flash(msg, category='error')
 
            raise HTTPNotFound()
 
        c.node1 = node1
 
        c.node2 = node2
 
        c.cs1 = c.changeset_1
 
        c.cs2 = c.changeset_2
 

	
 
        return render('files/diff_2way.html')
 

	
 
    def _get_node_history(self, cs, f_path, changesets=None):
 
        """
 
        Get changeset history for the given node.
 

	
 
        :param cs: changeset to calculate history for
        :param f_path: path of the node to calculate history for
        :param changesets: if passed, don't calculate history but take the
            changesets given in this list
 
        """
 
        # calculate history based on tip
 
        tip_cs = c.db_repo_scm_instance.get_changeset()
 
        if changesets is None:
 
            try:
 
                changesets = tip_cs.get_file_history(f_path)
 
            except (NodeDoesNotExistError, ChangesetError):
 
                #this node is not present at tip !
 
                changesets = cs.get_file_history(f_path)
 
        hist_l = []
 

	
 
        changesets_group = ([], _("Changesets"))
 
        branches_group = ([], _("Branches"))
 
        tags_group = ([], _("Tags"))
 
        for chs in changesets:
 
            #_branch = '(%s)' % chs.branch if (cs.repository.alias == 'hg') else ''
 
            _branch = chs.branch
 
            n_desc = '%s (%s)' % (h.show_id(chs), _branch)
 
            changesets_group[0].append((chs.raw_id, n_desc,))
 
        hist_l.append(changesets_group)
 

	
 
        for name, chs in c.db_repo_scm_instance.branches.items():
 
            branches_group[0].append((chs, name),)
 
        hist_l.append(branches_group)
 

	
 
        for name, chs in c.db_repo_scm_instance.tags.items():
 
            tags_group[0].append((chs, name),)
 
        hist_l.append(tags_group)
 

	
 
        return hist_l, changesets
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def nodelist(self, repo_name, revision, f_path):
 
        if request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            cs = self.__get_cs(revision)
 
            _d, _f = ScmModel().get_nodes(repo_name, cs.raw_id, f_path,
 
                                          flat=False)
 
            return {'nodes': _d + _f}
kallithea/lib/auth.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.auth
 
~~~~~~~~~~~~~~~~~~
 

	
 
authentication and permission libraries
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 4, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 
from __future__ import with_statement
 
import time
 
import os
 
import logging
 
import traceback
 
import hashlib
 
import itertools
 
import collections
 

	
 
from decorator import decorator
 

	
 
from pylons import url, request
 
from pylons.controllers.util import abort, redirect
 
from pylons.i18n.translation import _
 
from webhelpers.pylonslib import secure_form
 
from sqlalchemy import or_
 
from sqlalchemy.orm.exc import ObjectDeletedError
 
from sqlalchemy.orm import joinedload
 

	
 
from kallithea import __platform__, is_windows, is_unix
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.model import meta
 
from kallithea.model.meta import Session
 
from kallithea.model.user import UserModel
 
from kallithea.model.db import User, Repository, Permission, \
 
    UserToPerm, UserGroupRepoToPerm, UserGroupToPerm, UserGroupMember, \
 
    RepoGroup, UserGroupRepoGroupToPerm, UserIpMap, UserGroupUserGroupToPerm, \
 
    UserGroup, UserApiKeys
 

	
 
from kallithea.lib.utils2 import safe_unicode, aslist
 
from kallithea.lib.utils import get_repo_slug, get_repo_group_slug, \
 
    get_user_group_slug, conditional_cache
 
from kallithea.lib.caching_query import FromCache
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class PasswordGenerator(object):
 
    """
 
    A simple class for generating passwords from different sets of
    characters.

    usage::

        passwd_gen = PasswordGenerator()
        # generate an 8-character password containing only upper and
        # lower case letters of the alphabet
        passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
 
    """
 
    ALPHABETS_NUM = r'''1234567890'''
 
    ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
 
    ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
 
    ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
 
    ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
 
        + ALPHABETS_NUM + ALPHABETS_SPECIAL
 
    ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
 
    ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
 
    ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
 
    ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM
 

	
 
    def gen_password(self, length, alphabet=ALPHABETS_FULL):
 
        assert len(alphabet) <= 256, alphabet
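        # rejection sampling: draw one random byte at a time and keep it only
        # if it is a valid index into the alphabet; this avoids modulo bias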
 
        l = []
 
        while len(l) < length:
 
            i = ord(os.urandom(1))
 
            if i < len(alphabet):
 
                l.append(alphabet[i])
 
        return ''.join(l)
 

	
 

	
 
class KallitheaCrypto(object):
 

	
 
    @classmethod
 
    def hash_string(cls, str_):
 
        """
 
        Cryptographic function used for password hashing, based on pybcrypt
        or Python's own OpenSSL wrapper on Windows
 

	
 
        :param str_: password to hash
 
        """
 
        if is_windows:
 
            return hashlib.sha256(str_).hexdigest()
 
        elif is_unix:
 
            import bcrypt
 
            return bcrypt.hashpw(str_, bcrypt.gensalt(10))
 
        else:
 
            raise Exception('Unknown or unsupported platform %s' \
 
                            % __platform__)
 

	
 
    @classmethod
 
    def hash_check(cls, password, hashed):
 
        """
 
        Checks a password against its hashed value, using a different
        implementation depending on the platform it runs on
 

	
 
        :param password: password
 
        :param hashed: password in hashed form
 
        """
 

	
 
        if is_windows:
 
            return hashlib.sha256(password).hexdigest() == hashed
 
        elif is_unix:
 
            import bcrypt
 
            return bcrypt.hashpw(password, hashed) == hashed
 
        else:
 
            raise Exception('Unknown or unsupported platform %s' \
 
                            % __platform__)
 

	
 

	
 
def get_crypt_password(password):
 
    return KallitheaCrypto.hash_string(password)
 

	
 

	
 
def check_password(password, hashed):
 
    return KallitheaCrypto.hash_check(password, hashed)
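

# Round-trip sketch: hashed = get_crypt_password('secret') yields a bcrypt
# hash (SHA-256 on Windows), and check_password('secret', hashed) is True.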
 

	
 

	
 

	
 
def _cached_perms_data(user_id, user_is_admin, user_inherit_default_permissions,
 
                       explicit, algo):
 
    RK = 'repositories'
 
    GK = 'repositories_groups'
 
    UK = 'user_groups'
 
    GLOBAL = 'global'
 
    PERM_WEIGHTS = Permission.PERM_WEIGHTS
 
    permissions = {RK: {}, GK: {}, UK: {}, GLOBAL: set()}
 

	
 
    def _choose_perm(new_perm, cur_perm):
 
        new_perm_val = PERM_WEIGHTS[new_perm]
 
        cur_perm_val = PERM_WEIGHTS[cur_perm]
 
        if algo == 'higherwin':
 
            if new_perm_val > cur_perm_val:
 
                return new_perm
 
            return cur_perm
 
        elif algo == 'lowerwin':
 
            if new_perm_val < cur_perm_val:
 
                return new_perm
 
            return cur_perm
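
    # For example, assuming PERM_WEIGHTS ranks 'repository.write' above
    # 'repository.read': with algo='higherwin' the write permission wins,
    # while algo='lowerwin' keeps the read permission.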
 

	
 
    #======================================================================
 
    # fetch default permissions
 
    #======================================================================
 
    default_user = User.get_by_username('default', cache=True)
 
    default_user_id = default_user.user_id
 

	
 
    default_repo_perms = Permission.get_default_perms(default_user_id)
 
    default_repo_groups_perms = Permission.get_default_group_perms(default_user_id)
 
    default_user_group_perms = Permission.get_default_user_group_perms(default_user_id)
 

	
 
    if user_is_admin:
 
        #==================================================================
 
        # admin users have all rights;
 
        # based on default permissions, just set everything to admin
 
        #==================================================================
 
        permissions[GLOBAL].add('hg.admin')
 
        permissions[GLOBAL].add('hg.create.write_on_repogroup.true')
 

	
 
        # repositories
 
        for perm in default_repo_perms:
 
            r_k = perm.UserRepoToPerm.repository.repo_name
 
            p = 'repository.admin'
 
            permissions[RK][r_k] = p
 

	
 
        # repository groups
 
        for perm in default_repo_groups_perms:
 
            rg_k = perm.UserRepoGroupToPerm.group.group_name
 
            p = 'group.admin'
 
            permissions[GK][rg_k] = p
 

	
 
        # user groups
 
        for perm in default_user_group_perms:
 
            u_k = perm.UserUserGroupToPerm.user_group.users_group_name
 
            p = 'usergroup.admin'
 
            permissions[UK][u_k] = p
 
        return permissions
 

	
 
    #==================================================================
 
    # SET DEFAULTS GLOBAL, REPOS, REPOSITORY GROUPS
 
    #==================================================================
 

	
 
    # default global permissions taken from the default user
 
    default_global_perms = UserToPerm.query()\
 
        .filter(UserToPerm.user_id == default_user_id)\
 
        .options(joinedload(UserToPerm.permission))
 

	
 
    for perm in default_global_perms:
 
        permissions[GLOBAL].add(perm.permission.permission_name)
 

	
 
    # defaults for repositories, taken from default user
 
    for perm in default_repo_perms:
 
        r_k = perm.UserRepoToPerm.repository.repo_name
 
        if perm.Repository.private and not (perm.Repository.user_id == user_id):
 
            # disable defaults for private repos,
 
            p = 'repository.none'
 
        elif perm.Repository.user_id == user_id:
 
            # set admin if owner
 
            p = 'repository.admin'
 
        else:
 
            p = perm.Permission.permission_name
 

	
 
        permissions[RK][r_k] = p
 

	
 
    # defaults for repository groups taken from default user permission
 
    # on given group
 
    for perm in default_repo_groups_perms:
 
        rg_k = perm.UserRepoGroupToPerm.group.group_name
 
        p = perm.Permission.permission_name
 
        permissions[GK][rg_k] = p
 

	
 
    # defaults for user groups taken from default user permission
 
    # on given user group
 
    for perm in default_user_group_perms:
 
        u_k = perm.UserUserGroupToPerm.user_group.users_group_name
 
        p = perm.Permission.permission_name
 
        permissions[UK][u_k] = p
 

	
 
    #======================================================================
 
    # !! OVERRIDE GLOBALS !! with user permissions if any found
 
    #======================================================================
 
    # those can be configured from groups or users explicitly
 
    _configurable = set([
 
        'hg.fork.none', 'hg.fork.repository',
 
        'hg.create.none', 'hg.create.repository',
 
        'hg.usergroup.create.false', 'hg.usergroup.create.true'
 
    ])
 

	
 
    # USER GROUPS comes first
 
    # user group global permissions
 
    user_perms_from_users_groups = Session().query(UserGroupToPerm)\
 
        .options(joinedload(UserGroupToPerm.permission))\
 
        .join((UserGroupMember, UserGroupToPerm.users_group_id ==
 
               UserGroupMember.users_group_id))\
 
        .filter(UserGroupMember.user_id == user_id)\
 
        .join((UserGroup, UserGroupMember.users_group_id ==
 
               UserGroup.users_group_id))\
 
        .filter(UserGroup.users_group_active == True)\
 
        .order_by(UserGroupToPerm.users_group_id)\
 
        .all()
 
    # need to group here by groups since user can be in more than
 
    # one group
 
    _grouped = [[x, list(y)] for x, y in
 
                itertools.groupby(user_perms_from_users_groups,
 
                                  lambda x:x.users_group)]
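    # (itertools.groupby only merges consecutive rows, which is why the
    # query above orders by users_group_id)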
 
    for gr, perms in _grouped:
 
        # since the user can be in multiple groups, iterate over them and
        # select the lowest permissions first (more explicit)
        # TODO: do this ^^
 
        if not gr.inherit_default_permissions:
 
            # NEED TO IGNORE all configurable permissions and
 
            # replace them with explicitly set
 
            permissions[GLOBAL] = permissions[GLOBAL]\
 
                                            .difference(_configurable)
 
        for perm in perms:
 
            permissions[GLOBAL].add(perm.permission.permission_name)
 

	
 
    # user specific global permissions
 
    user_perms = Session().query(UserToPerm)\
 
            .options(joinedload(UserToPerm.permission))\
 
            .filter(UserToPerm.user_id == user_id).all()
 

	
 
    if not user_inherit_default_permissions:
 
        # NEED TO IGNORE all configurable permissions and
 
        # replace them with explicitly set
 
        permissions[GLOBAL] = permissions[GLOBAL]\
 
                                        .difference(_configurable)
 

	
 
        for perm in user_perms:
 
            permissions[GLOBAL].add(perm.permission.permission_name)
 
    ## END GLOBAL PERMISSIONS
 

	
 
    #======================================================================
 
    # !! PERMISSIONS FOR REPOSITORIES !!
 
    #======================================================================
 
    #======================================================================
 
    # check if the user is part of user groups for this repository and
    # fill in the permissions from them. _choose_perm decides which
    # permission is selected based on the chosen method
 
    #======================================================================
 

	
 
    # user group for repositories permissions
 
    user_repo_perms_from_users_groups = \
 
     Session().query(UserGroupRepoToPerm, Permission, Repository,)\
 
        .join((Repository, UserGroupRepoToPerm.repository_id ==
 
               Repository.repo_id))\
 
        .join((Permission, UserGroupRepoToPerm.permission_id ==
 
               Permission.permission_id))\
 
        .join((UserGroup, UserGroupRepoToPerm.users_group_id ==
 
               UserGroup.users_group_id))\
 
        .filter(UserGroup.users_group_active == True)\
 
        .join((UserGroupMember, UserGroupRepoToPerm.users_group_id ==
 
               UserGroupMember.users_group_id))\
 
        .filter(UserGroupMember.user_id == user_id)\
 
        .all()
 

	
 
    multiple_counter = collections.defaultdict(int)
 
    for perm in user_repo_perms_from_users_groups:
 
        r_k = perm.UserGroupRepoToPerm.repository.repo_name
 
        multiple_counter[r_k] += 1
 
        p = perm.Permission.permission_name
 
        cur_perm = permissions[RK][r_k]
 

	
 
        if perm.Repository.user_id == user_id:
 
            # set admin if owner
 
            p = 'repository.admin'
 
        else:
 
            if multiple_counter[r_k] > 1:
 
                p = _choose_perm(p, cur_perm)
 
        permissions[RK][r_k] = p
 

	
 
    # user explicit permissions for repositories, overrides any specified
 
    # by the group permission
 
    user_repo_perms = Permission.get_default_perms(user_id)
 
    for perm in user_repo_perms:
 
        r_k = perm.UserRepoToPerm.repository.repo_name
 
        cur_perm = permissions[RK][r_k]
 
        # set admin if owner
 
        if perm.Repository.user_id == user_id:
 
            p = 'repository.admin'
 
        else:
 
            p = perm.Permission.permission_name
 
            if not explicit:
 
                p = _choose_perm(p, cur_perm)
 
        permissions[RK][r_k] = p
 

	
 
    #======================================================================
 
    # !! PERMISSIONS FOR REPOSITORY GROUPS !!
 
    #======================================================================
 
    #======================================================================
 
    # check if the user is part of user groups for these repository groups and
    # fill in the permissions from them. _choose_perm decides which
    # permission is selected based on the chosen method
 
    #======================================================================
 
    # user group for repo groups permissions
 
    user_repo_group_perms_from_users_groups = \
 
     Session().query(UserGroupRepoGroupToPerm, Permission, RepoGroup)\
 
     .join((RepoGroup, UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id))\
 
     .join((Permission, UserGroupRepoGroupToPerm.permission_id
 
            == Permission.permission_id))\
 
     .join((UserGroup, UserGroupRepoGroupToPerm.users_group_id ==
 
            UserGroup.users_group_id))\
 
     .filter(UserGroup.users_group_active == True)\
 
     .join((UserGroupMember, UserGroupRepoGroupToPerm.users_group_id
 
            == UserGroupMember.users_group_id))\
 
     .filter(UserGroupMember.user_id == user_id)\
 
     .all()
 

	
 
    multiple_counter = collections.defaultdict(int)
 
    for perm in user_repo_group_perms_from_users_groups:
 
        g_k = perm.UserGroupRepoGroupToPerm.group.group_name
 
        multiple_counter[g_k] += 1
 
        p = perm.Permission.permission_name
 
        cur_perm = permissions[GK][g_k]
 
        if multiple_counter[g_k] > 1:
 
            p = _choose_perm(p, cur_perm)
 
        permissions[GK][g_k] = p
 

	
 
    # user explicit permissions for repository groups
 
    user_repo_groups_perms = Permission.get_default_group_perms(user_id)
 
    for perm in user_repo_groups_perms:
 
        rg_k = perm.UserRepoGroupToPerm.group.group_name
 
        p = perm.Permission.permission_name
 
        cur_perm = permissions[GK][rg_k]
 
        if not explicit:
 
            p = _choose_perm(p, cur_perm)
 
        permissions[GK][rg_k] = p
 

	
 
    #======================================================================
 
    # !! PERMISSIONS FOR USER GROUPS !!
 
    #======================================================================
 
    # user group for user group permissions
 
    user_group_user_groups_perms = \
 
     Session().query(UserGroupUserGroupToPerm, Permission, UserGroup)\
 
     .join((UserGroup, UserGroupUserGroupToPerm.target_user_group_id
 
            == UserGroup.users_group_id))\
 
     .join((Permission, UserGroupUserGroupToPerm.permission_id
 
            == Permission.permission_id))\
 
     .join((UserGroupMember, UserGroupUserGroupToPerm.user_group_id
 
            == UserGroupMember.users_group_id))\
 
     .filter(UserGroupMember.user_id == user_id)\
 
     .join((UserGroup, UserGroupMember.users_group_id ==
 
            UserGroup.users_group_id), aliased=True, from_joinpoint=True)\
 
     .filter(UserGroup.users_group_active == True)\
 
     .all()
 

	
 
    multiple_counter = collections.defaultdict(int)
 
    for perm in user_group_user_groups_perms:
 
        g_k = perm.UserGroupUserGroupToPerm.target_user_group.users_group_name
 
        multiple_counter[g_k] += 1
 
        p = perm.Permission.permission_name
 
        cur_perm = permissions[UK][g_k]
 
        if multiple_counter[g_k] > 1:
 
            p = _choose_perm(p, cur_perm)
 
        permissions[UK][g_k] = p
 

	
 
    # user explicit permissions for user groups
 
    user_user_groups_perms = Permission.get_default_user_group_perms(user_id)
 
    for perm in user_user_groups_perms:
 
        u_k = perm.UserUserGroupToPerm.user_group.users_group_name
 
        p = perm.Permission.permission_name
 
        cur_perm = permissions[UK][u_k]
 
        if not explicit:
 
            p = _choose_perm(p, cur_perm)
 
        permissions[UK][u_k] = p
 

	
 
    return permissions
 

	
 

	
 
def allowed_api_access(controller_name, whitelist=None, api_key=None):
 
    """
 
    Check if the given controller_name is in the API access whitelist
 
    """
 
    if not whitelist:
 
        from kallithea import CONFIG
 
        whitelist = aslist(CONFIG.get('api_access_controllers_whitelist'),
 
                           sep=',')
 
        log.debug('whitelist of API access is: %s', whitelist)
 
    api_access_valid = controller_name in whitelist
 
    if api_access_valid:
 
        log.debug('controller:%s is in API whitelist', controller_name)
 
    else:
 
        msg = 'controller: %s is *NOT* in API whitelist' % (controller_name)
 
        if api_key:
 
            # if we used an API key and don't have access, it's a warning
 
            log.warning(msg)
 
        else:
 
            log.debug(msg)
 
    return api_access_valid
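
# A minimal usage sketch for allowed_api_access (the controller names below
# are illustrative, not actual whitelist entries):
#
#   >>> allowed_api_access('ChangesetController:index',
#   ...                    whitelist=['ChangesetController:index'])
#   True
#   >>> allowed_api_access('UsersController:index',
#   ...                    whitelist=['ChangesetController:index'])
#   False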
 

	
 

	
 
class AuthUser(object):
 
    """
 
    Represents a Kallithea user, including various authentication and
 
    authorization information. Typically used to store the current user,
 
    but is also used as a generic user information data structure in
 
    parts of the code, e.g. user management.
 

	
 
    Constructed from a database `User` object, a user ID or cookie dict,
 
    it looks up the user (if needed) and copies all attributes to itself,
 
    adding various non-persistent data. If lookup fails but anonymous
 
    access to Kallithea is enabled, the default user is loaded instead.
 

	
 
    `AuthUser` does not by itself authenticate users and the constructor
 
    sets the `is_authenticated` field to False, except when falling back
 
    to the default anonymous user (if enabled). It's up to other parts
 
    of the code to check e.g. if a supplied password is correct, and if
 
    so, set `is_authenticated` to True.
 

	
 
    However, `AuthUser` does refuse to load a user that is not `active`.
 
    """
 

	
 
    def __init__(self, user_id=None, dbuser=None,
 
            is_external_auth=False):
 

	
 
        self.is_authenticated = False
 
        self.is_external_auth = is_external_auth
 

	
 
        user_model = UserModel()
 
        self.anonymous_user = User.get_default_user(cache=True)
 

	
 
        # These attributes will be overridden by fill_data, below, unless the
 
        # requested user cannot be found and the default anonymous user is
 
        # not enabled.
 
        self.user_id = None
 
        self.username = None
 
        self.api_key = None
 
        self.name = ''
 
        self.lastname = ''
 
        self.email = ''
 
        self.admin = False
 
        self.inherit_default_permissions = False
 

	
 
        # Look up database user, if necessary.
 
        if user_id is not None:
 
            log.debug('Auth User lookup by USER ID %s', user_id)
 
            dbuser = user_model.get(user_id)
 
        else:
 
            # Note: dbuser is allowed to be None.
 
            log.debug('Auth User lookup by database user %s', dbuser)
 

	
 
        is_user_loaded = self._fill_data(dbuser)
 

	
 
        # If user cannot be found, try falling back to anonymous.
 
        if not is_user_loaded:
 
            is_user_loaded = self._fill_data(self.anonymous_user)
 

	
 
        # The anonymous user is always "logged in".
 
        if self.user_id == self.anonymous_user.user_id:
 
            self.is_authenticated = True
 

	
 
        if not self.username:
 
            self.username = 'None'
 

	
 
        log.debug('Auth User is now %s', self)
 

	
 
    def _fill_data(self, dbuser):
 
        """
 
        Copies database fields from a `db.User` to this `AuthUser`. Does
 
        not copy `api_keys` and `permissions` attributes.
 

	
 
        Checks that `dbuser` is `active` (and not None) before copying;
 
        returns True on success.
 
        """
 
        if dbuser is not None and dbuser.active:
 
            log.debug('filling %s data', dbuser)
 
            for k, v in dbuser.get_dict().iteritems():
 
                assert k not in ['api_keys', 'permissions']
 
                setattr(self, k, v)
 
            return True
 
        return False
 

	
 
    @LazyProperty
 
    def permissions(self):
 
        return self.__get_perms(user=self, cache=False)
 

	
 
    @property
 
    def api_keys(self):
 
        return self._get_api_keys()
 

	
 
    def __get_perms(self, user, explicit=True, algo='higherwin', cache=False):
 
        """
 
        Fills the user permission attribute with permissions taken from the

        database; works for permissions given for repositories and for

        permissions granted to groups.


        :param user: `AuthUser` instance

        :param explicit: if there are permissions both for the user and for a

            group the user is part of, this flag determines whether the user's

            own permission explicitly overrides the group permission; if False,

            the decision is made based on `algo`

        :param algo: algorithm deciding which permission to choose when

            multiple are defined, e.g. when the user is in two different groups;

            it also decides, when `explicit` is off, which permission wins when

            the user is in a group and also has a separate permission defined

        """
 
        user_id = user.user_id
 
        user_is_admin = user.is_admin
 
        user_inherit_default_permissions = user.inherit_default_permissions
 

	
 
        log.debug('Getting PERMISSION tree')
 
        compute = conditional_cache('short_term', 'cache_desc',
 
                                    condition=cache, func=_cached_perms_data)
 
        return compute(user_id, user_is_admin,
 
                       user_inherit_default_permissions, explicit, algo)
 

	
 
    def _get_api_keys(self):
 
        api_keys = [self.api_key]
 
        for api_key in UserApiKeys.query()\
 
                .filter(UserApiKeys.user_id == self.user_id)\
 
                .filter(or_(UserApiKeys.expires == -1,
 
                            UserApiKeys.expires >= time.time())).all():
 
            api_keys.append(api_key.api_key)
 

	
 
        return api_keys
 

	
 
    @property
 
    def is_admin(self):
 
        return self.admin
 

	
 
    @property
 
    def repositories_admin(self):
 
        """
 
        Returns list of repositories you're an admin of
 
        """
 
        return [x[0] for x in self.permissions['repositories'].iteritems()
 
                if x[1] == 'repository.admin']
 

	
 
    @property
 
    def repository_groups_admin(self):
 
        """
 
        Returns list of repository groups you're an admin of
 
        """
 
        return [x[0] for x in self.permissions['repositories_groups'].iteritems()
 
                if x[1] == 'group.admin']
 

	
 
    @property
 
    def user_groups_admin(self):
 
        """
 
        Returns list of user groups you're an admin of
 
        """
 
        return [x[0] for x in self.permissions['user_groups'].iteritems()
 
                if x[1] == 'usergroup.admin']
 

	
 
    @staticmethod
 
    def check_ip_allowed(user, ip_addr):
 
        """
 
        Check if the given IP address (a `str`) is allowed for the given
 
        user (an `AuthUser` or `db.User`).
 
        """
 
        allowed_ips = AuthUser.get_allowed_ips(user.user_id, cache=True,
 
            inherit_from_default=user.inherit_default_permissions)
 
        if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
 
            log.debug('IP:%s is in range of %s', ip_addr, allowed_ips)
 
            return True
 
        else:
 
            log.info('Access for IP:%s forbidden, '

                     'not in %s', ip_addr, allowed_ips)
 
            return False
 

	
 
    def __repr__(self):
 
        return "<AuthUser('id:%s[%s] auth:%s')>"\
 
            % (self.user_id, self.username, self.is_authenticated)
 

	
 
    def set_authenticated(self, authenticated=True):
 
        if self.user_id != self.anonymous_user.user_id:
 
            self.is_authenticated = authenticated
 

	
 
    def to_cookie(self):
 
        """ Serializes this login session to a cookie `dict`. """
 
        return {
 
            'user_id': self.user_id,
 
            'is_authenticated': self.is_authenticated,
 
            'is_external_auth': self.is_external_auth,
 
        }
 

	
 
    @staticmethod
 
    def from_cookie(cookie):
 
        """
 
        Deserializes an `AuthUser` from a cookie `dict`.
 
        """
 

	
 
        au = AuthUser(
 
            user_id=cookie.get('user_id'),
 
            is_external_auth=cookie.get('is_external_auth', False),
 
        )
 
        if not au.is_authenticated and au.user_id is not None:
 
            # user is not authenticated, but a user id is present
 
            au.set_authenticated(cookie.get('is_authenticated'))
 
        return au
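
    # Round-trip sketch: to_cookie() yields a plain dict from which
    # from_cookie() reconstructs an equivalent AuthUser (user id 1 is
    # illustrative and assumes an active user with that id exists):
    #
    #   >>> au = AuthUser(user_id=1)
    #   >>> AuthUser.from_cookie(au.to_cookie()).user_id
    #   1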
 

	
 
    @classmethod
 
    def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
 
        _set = set()
 

	
 
        if inherit_from_default:
 
            default_ips = UserIpMap.query().filter(UserIpMap.user ==
 
                                            User.get_default_user(cache=True))
 
            if cache:
 
                default_ips = default_ips.options(FromCache("sql_cache_short",
 
                                                  "get_user_ips_default"))
 

	
 
            # populate from default user
 
            for ip in default_ips:
 
                try:
 
                    _set.add(ip.ip_addr)
 
                except ObjectDeletedError:
 
                    # since we use heavy caching, it sometimes happens that we

                    # get deleted objects here; we just skip them
 
                    pass
 

	
 
        user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
 
        if cache:
 
            user_ips = user_ips.options(FromCache("sql_cache_short",
 
                                                  "get_user_ips_%s" % user_id))
 

	
 
        for ip in user_ips:
 
            try:
 
                _set.add(ip.ip_addr)
 
            except ObjectDeletedError:
 
                # since we use heavy caching, it sometimes happens that we

                # get deleted objects here; we just skip them
 
                pass
 
        return _set or set(['0.0.0.0/0', '::/0'])
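
# Fallback sketch for get_allowed_ips: with no IP restrictions stored
# (user_id=None is illustrative and assumes no matching UserIpMap rows and a
# configured database session), the "allow everything" set is returned:
#
#   >>> sorted(AuthUser.get_allowed_ips(user_id=None))
#   ['0.0.0.0/0', '::/0']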
 

	
 

	
 
def set_available_permissions(config):
 
    """
 
    This function populates the pylons globals with all available permissions

    defined in the db. We don't want to check the db for new permissions each

    time, since adding a new permission also requires an application restart,

    i.e. to decorate new views with the newly created permission.
 

	
 
    :param config: current pylons config instance
 

	
 
    """
 
    log.info('getting information about all available permissions')
 
    try:
 
        sa = meta.Session
 
        all_perms = sa.query(Permission).all()
 
        config['available_permissions'] = [x.permission_name for x in all_perms]
 
    finally:
 
        meta.Session.remove()
 

	
 

	
 
#==============================================================================
 
# CHECK DECORATORS
 
#==============================================================================
 

	
 
def redirect_to_login(message=None):
 
    from kallithea.lib import helpers as h
 
    p = url.current()
 
    if message:
 
        h.flash(h.literal(message), category='warning')
 
    log.debug('Redirecting to login page, origin: %s', p)
 
    return redirect(url('login_home', came_from=p, **request.GET))
 

	
 
class LoginRequired(object):
 
    """
 
    Must be logged in to execute this function, else

    redirect to the login page.


    :param api_access: if enabled, check only for a valid auth token

        and grant access based on that token
 
    """
 

	
 
    def __init__(self, api_access=False):
 
        self.api_access = api_access
 

	
 
    def __call__(self, func):
 
        return decorator(self.__wrapper, func)
 

	
 
    def __wrapper(self, func, *fargs, **fkwargs):
 
        controller = fargs[0]
 
        user = controller.authuser
 
        loc = "%s:%s" % (controller.__class__.__name__, func.__name__)
 
        log.debug('Checking access for user %s @ %s', user, loc)
 

	
 
        if not AuthUser.check_ip_allowed(user, controller.ip_addr):
 
            return redirect_to_login(_('IP %s not allowed') % controller.ip_addr)
 

	
 
        # check if we used an API key and it's a valid one
 
        api_key = request.GET.get('api_key')
 
        if api_key is not None:
 
            # API access is explicitly enabled for this controller, or it is in our whitelist
 
            if self.api_access or allowed_api_access(loc, api_key=api_key):
 
                if api_key in user.api_keys:
 
                    log.info('user %s authenticated with API key ****%s @ %s',
 
                             user, api_key[-4:], loc)
 
                    return func(*fargs, **fkwargs)
 
                else:
 
                    log.warning('API key ****%s is NOT valid', api_key[-4:])
 
                    return redirect_to_login(_('Invalid API key'))
 
            else:
 
                # controller does not allow API access
 
                log.warning('API access to %s is not allowed', loc)
 
                return abort(403)
 

	
 
        # Only allow the following HTTP request methods. (We sometimes use POST
 
        # requests with a '_method' set to 'PUT' or 'DELETE'; but that is only
 
        # used for the route lookup, and does not affect request.method.)
 
        if request.method not in ['GET', 'HEAD', 'POST', 'PUT']:
 
            return abort(405)
 

	
 
        # CSRF protection: Whenever a request has ambient authority (whether
 
        # through a session cookie or its origin IP address), it must include
 
        # the correct token, unless the HTTP method is GET or HEAD (and thus
 
        # guaranteed to be side effect free). In practice, the only situation
 
        # where we allow side effects without ambient authority is when the
 
        # authority comes from an API key; and that is handled above.
 
        if request.method not in ['GET', 'HEAD']:
 
            token = request.POST.get(secure_form.token_key)
 
            if not token or token != secure_form.authentication_token():
 
                log.error('CSRF check failed')
 
                return abort(403)
 

	
 
        # WebOb already ignores request payload parameters for anything other
 
        # than POST/PUT, but double-check since other Kallithea code relies on
 
        # this assumption.
 
        if request.method not in ['POST', 'PUT'] and request.POST:
 
            log.error('%r request with payload parameters; WebOb should have stopped this', request.method)
 
            return abort(400)
 

	
 
        # regular user authentication
 
        if user.is_authenticated:
 
            log.info('user %s authenticated with regular auth @ %s', user, loc)
 
            return func(*fargs, **fkwargs)
 
        else:
 
            log.warning('user %s NOT authenticated with regular auth @ %s', user, loc)
 
            return redirect_to_login()
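
# Decorator usage sketch for LoginRequired (the controller and method names
# are illustrative; BaseController stands for the usual controller base class
# and is an assumption here):
#
#   class MyController(BaseController):
#       @LoginRequired(api_access=True)
#       def my_action(self):
#           ...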
 

	
 
class NotAnonymous(object):
 
    """
 
    Must be logged in to execute this function, else

    redirect to the login page."""
 

	
 
    def __call__(self, func):
 
        return decorator(self.__wrapper, func)
 

	
 
    def __wrapper(self, func, *fargs, **fkwargs):
 
        cls = fargs[0]
 
        self.user = cls.authuser
 

	
 
        log.debug('Checking if user is not anonymous @%s', cls)
 

	
 
        anonymous = self.user.username == User.DEFAULT_USER
 

	
 
        if anonymous:
 
            return redirect_to_login(_('You need to be a registered user to '
 
                    'perform this action'))
 
        else:
 
            return func(*fargs, **fkwargs)
 

	
 

	
 
class PermsDecorator(object):
 
    """Base class for controller decorators"""
 

	
 
    def __init__(self, *required_perms):
 
        self.required_perms = set(required_perms)
 
        self.user_perms = None
 

	
 
    def __call__(self, func):
 
        return decorator(self.__wrapper, func)
 

	
 
    def __wrapper(self, func, *fargs, **fkwargs):
 
        cls = fargs[0]
 
        self.user = cls.authuser
 
        self.user_perms = self.user.permissions
 
        log.debug('checking %s permissions %s for %s %s',
 
          self.__class__.__name__, self.required_perms, cls, self.user)
 

	
 
        if self.check_permissions():
 
            log.debug('Permission granted for %s %s', cls, self.user)
 
            return func(*fargs, **fkwargs)
 

	
 
        else:
 
            log.debug('Permission denied for %s %s', cls, self.user)
 
            anonymous = self.user.username == User.DEFAULT_USER
 

	
 
            if anonymous:
 
                return redirect_to_login(_('You need to be signed in to view this page'))
 
            else:
 
                # redirect with forbidden ret code
 
                return abort(403)
 

	
 
    def check_permissions(self):
 
        """Dummy function for overriding"""
 
        raise Exception('You have to write this function in a child class')
 

	
 

	
 
class HasPermissionAllDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for all given predicates. All of them
 
    have to be met in order to fulfill the request
 
    """
 

	
 
    def check_permissions(self):
 
        if self.required_perms.issubset(self.user_perms.get('global')):
 
            return True
 
        return False
 

	
 

	
 
class HasPermissionAnyDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for any of given predicates. In order to
 
    fulfill the request any of the predicates must be met
 
    """
 

	
 
    def check_permissions(self):
 
        if self.required_perms.intersection(self.user_perms.get('global')):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoPermissionAllDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for all given predicates for specific
 
    repository. All of them have to be met in order to fulfill the request
 
    """
 

	
 
    def check_permissions(self):
 
        repo_name = get_repo_slug(request)
 
        try:
 
            user_perms = set([self.user_perms['repositories'][repo_name]])
 
        except KeyError:
 
            return False
 
        if self.required_perms.issubset(user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoPermissionAnyDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for any of given predicates for specific
 
    repository. In order to fulfill the request any of the predicates must be met
 
    """
 

	
 
    def check_permissions(self):
 
        repo_name = get_repo_slug(request)
 
        try:
 
            user_perms = set([self.user_perms['repositories'][repo_name]])
 
        except KeyError:
 
            return False
 

	
 
        if self.required_perms.intersection(user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoGroupPermissionAllDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for all given predicates for specific
 
    repository group. All of them have to be met in order to fulfill the request
 
    """
 

	
 
    def check_permissions(self):
 
        group_name = get_repo_group_slug(request)
 
        try:
 
            user_perms = set([self.user_perms['repositories_groups'][group_name]])
 
        except KeyError:
 
            return False
 

	
 
        if self.required_perms.issubset(user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for any of given predicates for specific
 
    repository group. In order to fulfill the request any of the predicates must be met
 
    """
 

	
 
    def check_permissions(self):
 
        group_name = get_repo_group_slug(request)
 
        try:
 
            user_perms = set([self.user_perms['repositories_groups'][group_name]])
 
        except KeyError:
 
            return False
 

	
 
        if self.required_perms.intersection(user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasUserGroupPermissionAllDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for all given predicates for specific
 
    user group. All of them have to be met in order to fulfill the request
 
    """
 

	
 
    def check_permissions(self):
 
        group_name = get_user_group_slug(request)
 
        try:
 
            user_perms = set([self.user_perms['user_groups'][group_name]])
 
        except KeyError:
 
            return False
 

	
 
        if self.required_perms.issubset(user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasUserGroupPermissionAnyDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for any of given predicates for specific
 
    user group. In order to fulfill the request any of the predicates must be met
 
    """
 

	
 
    def check_permissions(self):
 
        group_name = get_user_group_slug(request)
 
        try:
 
            user_perms = set([self.user_perms['user_groups'][group_name]])
 
        except KeyError:
 
            return False
 

	
 
        if self.required_perms.intersection(user_perms):
 
            return True
 
        return False
 

	
 

	
 
#==============================================================================
 
# CHECK FUNCTIONS
 
#==============================================================================
 
class PermsFunction(object):
 
    """Base function for other check functions"""
 

	
 
    def __init__(self, *perms):
 
        self.required_perms = set(perms)
 
        self.user_perms = None
 
        self.repo_name = None
 
        self.group_name = None
 

	
 
    def __call__(self, check_location='', user=None):
 
        if not user:
 
            # TODO: remove this someday, put user as an attribute here
 
            user = request.user
 

	
 
        # init auth user if not already given
 
        if not isinstance(user, AuthUser):
 
            user = AuthUser(user.user_id)
 

	
 
        cls_name = self.__class__.__name__
 
        check_scope = {
 
            'HasPermissionAll': '',
 
            'HasPermissionAny': '',
 
            'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
 
            'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
 
            'HasRepoGroupPermissionAll': 'group:%s' % self.group_name,
 
            'HasRepoGroupPermissionAny': 'group:%s' % self.group_name,
 
        }.get(cls_name, '?')
 
        log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
 
                  self.required_perms, user, check_scope,
 
                  check_location or 'unspecified location')
 
        if not user:
 
            log.debug('Empty request user')
 
            return False
 
        self.user_perms = user.permissions
 
        if self.check_permissions():
 
            log.debug('Permission to %s granted for user: %s @ %s',
 
                      check_scope, user,
 
                      check_location or 'unspecified location')
 
            return True
 

	
 
        else:
 
            log.debug('Permission to %s denied for user: %s @ %s',
 
                      check_scope, user,
 
                      check_location or 'unspecified location')
 
            return False
 

	
 
    def check_permissions(self):
 
        """Dummy function for overriding"""
 
        raise Exception('You have to write this function in a child class')
 

	
 

	
 
class HasPermissionAll(PermsFunction):
 
    def check_permissions(self):
 
        if self.required_perms.issubset(self.user_perms.get('global')):
 
            return True
 
        return False
 

	
 

	
 
class HasPermissionAny(PermsFunction):
 
    def check_permissions(self):
 
        if self.required_perms.intersection(self.user_perms.get('global')):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoPermissionAll(PermsFunction):
 
    def __call__(self, repo_name=None, check_location='', user=None):
 
        self.repo_name = repo_name
 
        return super(HasRepoPermissionAll, self).__call__(check_location, user)
 

	
 
    def check_permissions(self):
 
        if not self.repo_name:
 
            self.repo_name = get_repo_slug(request)
 

	
 
        try:
 
            self._user_perms = set(
 
                [self.user_perms['repositories'][self.repo_name]]
 
            )
 
        except KeyError:
 
            return False
 
        if self.required_perms.issubset(self._user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoPermissionAny(PermsFunction):
 
    def __call__(self, repo_name=None, check_location='', user=None):
 
        self.repo_name = repo_name
 
        return super(HasRepoPermissionAny, self).__call__(check_location, user)
 

	
 
    def check_permissions(self):
 
        if not self.repo_name:
 
            self.repo_name = get_repo_slug(request)
 

	
 
        try:
 
            self._user_perms = set(
 
                [self.user_perms['repositories'][self.repo_name]]
 
            )
 
        except KeyError:
 
            return False
 
        if self.required_perms.intersection(self._user_perms):
 
            return True
 
        return False
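
# Usage sketch for the check functions above ('myrepo' and the check location
# string are illustrative; the result depends on the current request user's
# permissions):
#
#   >>> HasRepoPermissionAny('repository.read', 'repository.write',
#   ...                      'repository.admin')('myrepo', 'changeset view')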
 

	
 

	
 
class HasRepoGroupPermissionAny(PermsFunction):
 
    def __call__(self, group_name=None, check_location='', user=None):
 
        self.group_name = group_name
 
        return super(HasRepoGroupPermissionAny, self).__call__(check_location, user)
 

	
 
    def check_permissions(self):
 
        try:
 
            self._user_perms = set(
 
                [self.user_perms['repositories_groups'][self.group_name]]
 
            )
 
        except KeyError:
 
            return False
 
        if self.required_perms.intersection(self._user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoGroupPermissionAll(PermsFunction):
 
    def __call__(self, group_name=None, check_location='', user=None):
 
        self.group_name = group_name
 
        return super(HasRepoGroupPermissionAll, self).__call__(check_location, user)
 

	
 
    def check_permissions(self):
 
        try:
 
            self._user_perms = set(
 
                [self.user_perms['repositories_groups'][self.group_name]]
 
            )
 
        except KeyError:
 
            return False
 
        if self.required_perms.issubset(self._user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasUserGroupPermissionAny(PermsFunction):
 
    def __call__(self, user_group_name=None, check_location='', user=None):
 
        self.user_group_name = user_group_name
 
        return super(HasUserGroupPermissionAny, self).__call__(check_location, user)
 

	
 
    def check_permissions(self):
 
        try:
 
            self._user_perms = set(
 
                [self.user_perms['user_groups'][self.user_group_name]]
 
            )
 
        except KeyError:
 
            return False
 
        if self.required_perms.intersection(self._user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasUserGroupPermissionAll(PermsFunction):
 
    def __call__(self, user_group_name=None, check_location='', user=None):
 
        self.user_group_name = user_group_name
 
        return super(HasUserGroupPermissionAll, self).__call__(check_location, user)
 

	
 
    def check_permissions(self):
 
        try:
 
            self._user_perms = set(
 
                [self.user_perms['user_groups'][self.user_group_name]]
 
            )
 
        except KeyError:
 
            return False
 
        if self.required_perms.issubset(self._user_perms):
 
            return True
 
        return False
 

	
 

	
 
#==============================================================================
 
# SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
 
#==============================================================================
 
class HasPermissionAnyMiddleware(object):
 
    def __init__(self, *perms):
 
        self.required_perms = set(perms)
 

	
 
    def __call__(self, user, repo_name):
 
        # repo_name MUST be unicode, since we handle keys in the permission

        # dict as unicode
 
        repo_name = safe_unicode(repo_name)
 
        usr = AuthUser(user.user_id)
 
        self.user_perms = set([usr.permissions['repositories'][repo_name]])
 
        self.username = user.username
 
        self.repo_name = repo_name
 
        return self.check_permissions()
 

	
 
    def check_permissions(self):
 
        log.debug('checking VCS protocol '
 
                  'permissions %s for user:%s repository:%s', self.user_perms,
 
                                                self.username, self.repo_name)
 
        if self.required_perms.intersection(self.user_perms):
 
            log.debug('Permission to repo: %s granted for user: %s @ %s',
 
                      self.repo_name, self.username, 'PermissionMiddleware')
 
            return True
 
        log.debug('Permission to repo: %s denied for user: %s @ %s',
 
                  self.repo_name, self.username, 'PermissionMiddleware')
 
        return False
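
# Middleware usage sketch: instantiate the check once, then call it per
# request (`db_user` stands for a db.User instance and is illustrative):
#
#   >>> can_write = HasPermissionAnyMiddleware('repository.write',
#   ...                                        'repository.admin')
#   >>> can_write(db_user, u'myrepo')  # True or False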
 

	
 

	
 
#==============================================================================
 
# SPECIAL VERSION TO HANDLE API AUTH
 
#==============================================================================
 
class _BaseApiPerm(object):
 
    def __init__(self, *perms):
 
        self.required_perms = set(perms)
 

	
 
    def __call__(self, check_location=None, user=None, repo_name=None,
 
                 group_name=None):
 
        cls_name = self.__class__.__name__
 
        check_scope = 'user:%s' % (user)
 
        if repo_name:
 
            check_scope += ', repo:%s' % (repo_name)
 

	
 
        if group_name:
 
            check_scope += ', repo group:%s' % (group_name)
 

	
 
        log.debug('checking cls:%s %s %s @ %s',
 
                  cls_name, self.required_perms, check_scope, check_location)
 
        if not user:
 
            log.debug('Empty User passed into arguments')
 
            return False
 

	
 
        ## process user
 
        if not isinstance(user, AuthUser):
 
            user = AuthUser(user.user_id)
 
        if not check_location:
 
            check_location = 'unspecified'
 
        if self.check_permissions(user.permissions, repo_name, group_name):
 
            log.debug('Permission to %s granted for user: %s @ %s',
 
                      check_scope, user, check_location)
 
            return True
 

	
 
        else:
 
            log.debug('Permission to %s denied for user: %s @ %s',
 
                      check_scope, user, check_location)
 
            return False
 

	
 
    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
 
        """
 
        Implement in child class; should return True if permissions are ok,

        False otherwise.
 

	
 
        :param perm_defs: dict with permission definitions
 
        :param repo_name: repo name
 
        """
 
        raise NotImplementedError()
 

	
 

	
 
class HasPermissionAllApi(_BaseApiPerm):
 
    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
 
        if self.required_perms.issubset(perm_defs.get('global')):
 
            return True
 
        return False
 

	
 

	
 
class HasPermissionAnyApi(_BaseApiPerm):
 
    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
 
        if self.required_perms.intersection(perm_defs.get('global')):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoPermissionAllApi(_BaseApiPerm):
 
    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
 
        try:
 
            _user_perms = set([perm_defs['repositories'][repo_name]])
 
        except KeyError:
 
            log.warning(traceback.format_exc())
 
            return False
 
        if self.required_perms.issubset(_user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoPermissionAnyApi(_BaseApiPerm):
 
    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
 
        try:
 
            _user_perms = set([perm_defs['repositories'][repo_name]])
 
        except KeyError:
 
            log.warning(traceback.format_exc())
 
            return False
 
        if self.required_perms.intersection(_user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
 
    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
 
        try:
 
            _user_perms = set([perm_defs['repositories_groups'][group_name]])
 
        except KeyError:
 
            log.warning(traceback.format_exc())
 
            return False
 
        if self.required_perms.intersection(_user_perms):
 
            return True
 
        return False
 

	
 
class HasRepoGroupPermissionAllApi(_BaseApiPerm):
 
    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
 
        try:
 
            _user_perms = set([perm_defs['repositories_groups'][group_name]])
 
        except KeyError:
 
            log.warning(traceback.format_exc())
 
            return False
 
        if self.required_perms.issubset(_user_perms):
 
            return True
 
        return False
 

	
 
def check_ip_access(source_ip, allowed_ips=None):
 
    """
 
    Checks if source_ip is within any of the allowed_ips networks.
 

	
 
    :param source_ip:
 
    :param allowed_ips: list of allowed ips together with mask
 
    """
 
    from kallithea.lib import ipaddr
 
    log.debug('checking if ip:%s is subnet of %s', source_ip, allowed_ips)
 
    if isinstance(allowed_ips, (tuple, list, set)):
 
        for ip in allowed_ips:
 
            if ipaddr.IPAddress(source_ip) in ipaddr.IPNetwork(ip):
 
                log.debug('IP %s is in network %s',
 
                          ipaddr.IPAddress(source_ip), ipaddr.IPNetwork(ip))
 
                return True
 
    return False
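
# A minimal sketch of check_ip_access against a list of allowed networks
# (the addresses are illustrative):
#
#   >>> check_ip_access('192.168.1.5', allowed_ips=['192.168.1.0/24'])
#   True
#   >>> check_ip_access('10.0.0.1', allowed_ips=['192.168.1.0/24'])
#   False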
kallithea/lib/indexers/daemon.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.indexers.daemon
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
A daemon will read from task table and run tasks
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jan 26, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 

	
 
import os
 
import sys
 
import logging
 
import traceback
 

	
 
from shutil import rmtree
 
from time import mktime
 

	
 
from os.path import dirname as dn
 
from os.path import join as jn
 

	
 
# Add location of top level folder to sys.path
 
project_path = dn(dn(dn(dn(os.path.realpath(__file__)))))
 
sys.path.append(project_path)
 

	
 
from kallithea.config.conf import INDEX_EXTENSIONS
 
from kallithea.model.scm import ScmModel
 
from kallithea.model.db import Repository
 
from kallithea.lib.utils2 import safe_unicode, safe_str
 
from kallithea.lib.indexers import SCHEMA, IDX_NAME, CHGSETS_SCHEMA, \
 
    CHGSET_IDX_NAME
 

	
 
from kallithea.lib.vcs.exceptions import ChangesetError, RepositoryError, \
 
    NodeDoesNotExistError
 

	
 
from whoosh.index import create_in, open_dir, exists_in
 
from whoosh.query import *
 
from whoosh.qparser import QueryParser
 

	
 
log = logging.getLogger('whoosh_indexer')
 

	
 

	
 
class WhooshIndexingDaemon(object):
 
    """
 
    Daemon for atomic indexing jobs
 
    """
 

	
 
    def __init__(self, indexname=IDX_NAME, index_location=None,
 
                 repo_location=None, sa=None, repo_list=None,
 
                 repo_update_list=None):
 
        self.indexname = indexname
 

	
 
        self.index_location = index_location
 
        if not index_location:
 
            raise Exception('You have to provide an index location')
 

	
 
        self.repo_location = repo_location
 
        if not repo_location:
 
            raise Exception('You have to provide the repositories location')
 

	
 
        self.repo_paths = ScmModel(sa).repo_scan(self.repo_location)
 

	
 
        #filter repo list
 
        if repo_list:
 
            #Fix non-ascii repo names to unicode
 
            repo_list = map(safe_unicode, repo_list)
 
            self.filtered_repo_paths = {}
 
            for repo_name, repo in self.repo_paths.items():
 
                if repo_name in repo_list:
 
                    self.filtered_repo_paths[repo_name] = repo
 

	
 
            self.repo_paths = self.filtered_repo_paths
 

	
 
        #filter update repo list
 
        self.filtered_repo_update_paths = {}
 
        if repo_update_list:
 
            self.filtered_repo_update_paths = {}
 
            for repo_name, repo in self.repo_paths.items():
 
                if repo_name in repo_update_list:
 
                    self.filtered_repo_update_paths[repo_name] = repo
 
            self.repo_paths = self.filtered_repo_update_paths
 

	
 
        self.initial = True
 
        if not os.path.isdir(self.index_location):
 
            os.makedirs(self.index_location)
 
            log.info('Cannot run incremental index since it does not '
 
                     'yet exist; running full build')
 
        elif not exists_in(self.index_location, IDX_NAME):
 
            log.info('Running full index build as the file content '
 
                     'index does not exist')
 
        elif not exists_in(self.index_location, CHGSET_IDX_NAME):
 
            log.info('Running full index build as the changeset '
 
                     'index does not exist')
 
        else:
 
            self.initial = False
 

	
 
    def _get_index_revision(self, repo):
 
        db_repo = Repository.get_by_repo_name(repo.name_unicode)
 
        landing_rev = 'tip'
 
        if db_repo:
 
            _rev_type, _rev = db_repo.landing_rev
 
            landing_rev = _rev
 
        return landing_rev
 

	
 
    def _get_index_changeset(self, repo, index_rev=None):
 
        if not index_rev:
 
            index_rev = self._get_index_revision(repo)
 
        cs = repo.get_changeset(index_rev)
 
        return cs
 

	
 
    def get_paths(self, repo):
 
        """
 
        Recursively walk from the root dir and return a set of all paths in

        that dir, based on the repository walk function.
 
        """
 
        index_paths_ = set()
 
        try:
 
            cs = self._get_index_changeset(repo)
 
            for _topnode, _dirs, files in cs.walk('/'):
 
                for f in files:
 
                    index_paths_.add(jn(safe_str(repo.path), safe_str(f.path)))
 

	
 
        except RepositoryError:
 
            log.debug(traceback.format_exc())
 
 
        return index_paths_
 

	
 
    def get_node(self, repo, path, index_rev=None):
 
        """
 
        Gets a file node based on the given full path. It operates on str for

        hg/git compatibility.
 

	
 
        :param repo: scm repo instance
 
        :param path: full path including root location
 
        :return: FileNode
 
        """
 
        # FIXME: paths should be normalized ... or even better: don't include repo.path
 
        path = safe_str(path)
 
        repo_path = safe_str(repo.path)
 
        assert path.startswith(repo_path)
 
        assert path[len(repo_path)] in (os.path.sep, os.path.altsep)
 
        node_path = path[len(repo_path) + 1:]
 
        cs = self._get_index_changeset(repo, index_rev=index_rev)
 
        node = cs.get_node(node_path)
 
        return node
 

	
 
    def get_node_mtime(self, node):
 
        return mktime(node.last_changeset.date.timetuple())
 

	
 
    def add_doc(self, writer, path, repo, repo_name, index_rev=None):
 
        """
 
        Add a doc to the writer; this function itself fetches the data from

        the vcs backend instance.
 
        """
 

	
 
        node = self.get_node(repo, path, index_rev)
 
        indexed = indexed_w_content = 0
 
        # we just index the content of chosen files, and skip binary files
 
        if node.extension in INDEX_EXTENSIONS and not node.is_binary:
 
            u_content = node.content
 
            if not isinstance(u_content, unicode):
 
                log.warning('  >> %s Could not get this content as unicode, '

                            'replacing with empty content', path)
 
                u_content = u''
 
            else:
 
                log.debug('    >> %s [WITH CONTENT]', path)
 
                indexed_w_content += 1
 

	
 
        else:
 
            log.debug('    >> %s', path)
 
            # just index the file name without its content
 
            u_content = u''
 
            indexed += 1
 

	
 
        p = safe_unicode(path)
 
        writer.add_document(
 
            fileid=p,
 
            owner=unicode(repo.contact),
 
            repository=safe_unicode(repo_name),
 
            path=p,
 
            content=u_content,
 
            modtime=self.get_node_mtime(node),
 
            extension=node.extension
 
        )
 
        return indexed, indexed_w_content
 

	
 
    def index_changesets(self, writer, repo_name, repo, start_rev=None):
 
        """
 
        Add all changeset in the vcs repo starting at start_rev
 
        to the index writer
 

	
 
        :param writer: the whoosh index writer to add to
 
        :param repo_name: name of the repository from which the

          changesets originate, including the repository group

        :param repo: the vcs repository instance to index changesets for;

          the presumption is that the repo has changesets to index

        :param start_rev: the full sha id to start indexing from;

          if start_rev is None then index from the first changeset in

          the repo

        """
 

	
 
        if start_rev is None:
 
            start_rev = repo[0].raw_id
 

	
 
        log.debug('indexing changesets in %s starting at rev: %s',
 
                  repo_name, start_rev)
 

	
 
        indexed = 0
 
        cs_iter = repo.get_changesets(start=start_rev)
 
        total = len(cs_iter)
 
        for cs in cs_iter:
 
            log.debug('    >> %s/%s', cs, total)
 
            writer.add_document(
 
                raw_id=unicode(cs.raw_id),
 
                owner=unicode(repo.contact),
 
                date=cs._timestamp,
 
                repository=safe_unicode(repo_name),
 
                author=cs.author,
 
                message=cs.message,
 
                last=cs.last,
 
                added=u' '.join([safe_unicode(node.path) for node in cs.added]).lower(),
 
                removed=u' '.join([safe_unicode(node.path) for node in cs.removed]).lower(),
 
                changed=u' '.join([safe_unicode(node.path) for node in cs.changed]).lower(),
 
                parents=u' '.join([p.raw_id for p in cs.parents]),  # don't shadow cs
 
            )
 
            indexed += 1
 

	
 
        log.debug('indexed %d changesets for repo %s', indexed, repo_name)
 
        return indexed
 

	
 
    def index_files(self, file_idx_writer, repo_name, repo):
 
        """
 
        Index files for given repo_name
 

	
 
        :param file_idx_writer: the whoosh index writer to add to
 
        :param repo_name: name of the repository we're indexing
 
        :param repo: instance of vcs repo
 
        """
 
        i_cnt = iwc_cnt = 0
 
        log.debug('building index for %s @revision:%s', repo.path,
 
                                                self._get_index_revision(repo))
 
        index_rev = self._get_index_revision(repo)
 
        for idx_path in self.get_paths(repo):
 
            i, iwc = self.add_doc(file_idx_writer, idx_path, repo, repo_name, index_rev)
 
            i_cnt += i
 
            iwc_cnt += iwc
 

	
 
        log.debug('added %s files %s with content for repo %s',
 
                  i_cnt + iwc_cnt, iwc_cnt, repo.path)
 
        return i_cnt, iwc_cnt
 

	
 
    def update_changeset_index(self):
 
        idx = open_dir(self.index_location, indexname=CHGSET_IDX_NAME)
 

	
 
        with idx.searcher() as searcher:
 
            writer = idx.writer()
 
            writer_is_dirty = False
 
            try:
 
                indexed_total = 0
 
                repo_name = None
 
                for repo_name, repo in self.repo_paths.items():
 
                    # skip indexing if there aren't any revs in the repo
 
                    num_of_revs = len(repo)
 
                    if num_of_revs < 1:
 
                        continue
 

	
 
                    qp = QueryParser('repository', schema=CHGSETS_SCHEMA)
 
                    q = qp.parse(u"last:t AND %s" % repo_name)
 

	
 
                    results = searcher.search(q)
 

	
 
                    # default to scanning the entire repo
 
                    last_rev = 0
 
                    start_id = None
 

	
 
                    if len(results) > 0:
 
                        # assuming that there is only one result; if not, this

                        # may require a full re-index.
 
                        start_id = results[0]['raw_id']
 
                        last_rev = repo.get_changeset(revision=start_id).revision
 

	
 
                    # there are new changesets to index or a new repo to index
 
                    if last_rev == 0 or num_of_revs > last_rev + 1:
 
                        # delete the docs in the index for the previous
 
                        # last changeset(s)
 
                        for hit in results:
 
                            q = qp.parse(u"last:t AND %s AND raw_id:%s" %
 
                                            (repo_name, hit['raw_id']))
 
                            writer.delete_by_query(q)
 

	
 
                        # index from the previous last changeset + all new ones
 
                        indexed_total += self.index_changesets(writer,
 
                                                repo_name, repo, start_id)
 
                        writer_is_dirty = True
 
                log.debug('indexed %s changesets for repo %s',
 
                          indexed_total, repo_name
 
                )
 
            finally:
 
                if writer_is_dirty:
 
                    log.debug('>> COMMITTING CHANGES TO CHANGESET INDEX <<')
 
                    writer.commit(merge=True)
 
                    log.debug('>>> FINISHED REBUILDING CHANGESET INDEX <<<')
 
                else:
 
                    log.debug('>> NOTHING TO COMMIT TO CHANGESET INDEX <<')
 

	
 
    def update_file_index(self):
 
        log.debug(u'STARTING INCREMENTAL INDEXING UPDATE FOR EXTENSIONS %s '

                  'AND REPOS %s', INDEX_EXTENSIONS, self.repo_paths.keys())
 

	
 
        idx = open_dir(self.index_location, indexname=self.indexname)
 
        # The set of all paths in the index
 
        indexed_paths = set()
 
        # The set of all paths we need to re-index
 
        to_index = set()
 

	
 
        writer = idx.writer()
 
        writer_is_dirty = False
 
        try:
 
            with idx.reader() as reader:
 

	
 
                # Loop over the stored fields in the index
 
                for fields in reader.all_stored_fields():
 
                    indexed_path = fields['path']
 
                    indexed_repo_path = fields['repository']
 
                    indexed_paths.add(indexed_path)
 

	
 
                    if indexed_repo_path not in self.filtered_repo_update_paths:
 
                        continue
 

	
 
                    repo = self.repo_paths[indexed_repo_path]
 

	
 
                    try:
 
                        node = self.get_node(repo, indexed_path)
 
                        # Check if this file was changed since it was indexed
 
                        indexed_time = fields['modtime']
 
                        mtime = self.get_node_mtime(node)
 
                        if mtime > indexed_time:
 
                            # The file has changed, delete it and add it to
 
                            # the list of files to reindex
 
                            log.debug(
 
                                'adding to reindex list %s mtime: %s vs %s',
 
                                    indexed_path, mtime, indexed_time
 
                            )
 
                            writer.delete_by_term('fileid', indexed_path)
 
                            writer_is_dirty = True
 

	
 
                            to_index.add(indexed_path)
 
                    except (ChangesetError, NodeDoesNotExistError):
 
                        # This file was deleted since it was indexed
 
                        log.debug('removing from index %s', indexed_path)
 
                        writer.delete_by_term('path', indexed_path)
 
                        writer_is_dirty = True
 

	
 
            # Loop over the files in the filesystem
 
            # Assume we have a function that gathers the filenames of the
 
            # documents to be indexed
 
            ri_cnt_total = 0  # indexed
 
            riwc_cnt_total = 0  # indexed with content
 
            for repo_name, repo in self.repo_paths.items():
 
                # skip indexing if there aren't any revisions
 
                if len(repo) < 1:
 
                    continue
 
                ri_cnt = 0   # indexed
 
                riwc_cnt = 0  # indexed with content
 
                for path in self.get_paths(repo):
 
                    path = safe_unicode(path)
 
                    if path in to_index or path not in indexed_paths:
 

	
 
                        # This is either a file that's changed, or a new file
 
                        # that wasn't indexed before. So index it!
 
                        i, iwc = self.add_doc(writer, path, repo, repo_name)
 
                        writer_is_dirty = True
 
                        log.debug('re-indexing %s', path)
 
                        ri_cnt += i
 
                        ri_cnt_total += 1
 
                        riwc_cnt += iwc
 
                        riwc_cnt_total += iwc
 
                log.debug('added %s files %s with content for repo %s',
 
                             ri_cnt + riwc_cnt, riwc_cnt, repo.path
 
                )
 
            log.debug('indexed %s files in total and %s with content',
 
                        ri_cnt_total, riwc_cnt_total
 
            )
 
        finally:
 
            if writer_is_dirty:
 
                log.debug('>> COMMITTING CHANGES TO FILE INDEX <<')
 
                writer.commit(merge=True)
 
                log.debug('>>> FINISHED REBUILDING FILE INDEX <<<')
 
            else:
 
                log.debug('>> NOTHING TO COMMIT TO FILE INDEX <<')
 
                writer.cancel()
 

	
 
    def build_indexes(self):
 
        if os.path.exists(self.index_location):
 
            log.debug('removing previous index')
 
            rmtree(self.index_location)
 

	
 
        if not os.path.exists(self.index_location):
 
            os.mkdir(self.index_location)
 

	
 
        chgset_idx = create_in(self.index_location, CHGSETS_SCHEMA,
 
                               indexname=CHGSET_IDX_NAME)
 
        chgset_idx_writer = chgset_idx.writer()
 

	
 
        file_idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME)
 
        file_idx_writer = file_idx.writer()
 
        log.debug('BUILDING INDEX FOR EXTENSIONS %s '

                  'AND REPOS %s', INDEX_EXTENSIONS, self.repo_paths.keys())
 

	
 
        for repo_name, repo in self.repo_paths.items():
 
            # skip indexing if there aren't any revisions
 
            if len(repo) < 1:
 
                continue
 

	
 
            self.index_files(file_idx_writer, repo_name, repo)
 
            self.index_changesets(chgset_idx_writer, repo_name, repo)
 

	
 
        log.debug('>> COMMITTING CHANGES <<')
 
        file_idx_writer.commit(merge=True)
 
        chgset_idx_writer.commit(merge=True)
 
        log.debug('>>> FINISHED BUILDING INDEX <<<')
 

	
 
    def update_indexes(self):
 
        self.update_file_index()
 
        self.update_changeset_index()
 

	
 
    def run(self, full_index=False):
 
        """Run daemon"""
 
        if full_index or self.initial:
 
            self.build_indexes()
 
        else:
 
            self.update_indexes()
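
# Invocation sketch (the paths are illustrative):
#
#   WhooshIndexingDaemon(index_location='/srv/kallithea/index',
#                        repo_location='/srv/kallithea/repos').run(full_index=True)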
kallithea/lib/paster_commands/cache_keys.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.paster_commands.cache_keys
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
cleanup-keys paster command for Kallithea
 

	
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Mar 27, 2013
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 

	
 
import os
 
import sys
 
import logging
 

	
 
from kallithea.model.meta import Session
 
from kallithea.lib.utils import BasePasterCommand
 
from kallithea.model.db import CacheInvalidation
 

	
 
# Add location of top level folder to sys.path
 
from os.path import dirname as dn
 
rc_path = dn(dn(dn(os.path.realpath(__file__))))
 
sys.path.append(rc_path)
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class Command(BasePasterCommand):
 

	
 
    max_args = 1
 
    min_args = 1
 

	
 
    usage = "CONFIG_FILE"
 
    group_name = "Kallithea"
 
    takes_config_file = -1
 
    parser = BasePasterCommand.standard_parser(verbose=True)
 
    summary = "Cache keys utils"
 

	
 
    def command(self):
 
        #get SqlAlchemy session
 
        self._init_session()
 
        _caches = CacheInvalidation.query().order_by(CacheInvalidation.cache_key).all()
 
        if self.options.show:
 
            for c_obj in _caches:
 
                print 'key:%s active:%s' % (c_obj.cache_key, c_obj.cache_active)
 
        elif self.options.cleanup:
 
            for c_obj in _caches:
 
                Session().delete(c_obj)
 
                print 'removing key:%s' % (c_obj.cache_key)
 
                Session().commit()
 
        else:
 
            print 'nothing done, exiting...'
 
        sys.exit(0)
 

	
 
    def update_parser(self):
 
        self.parser.add_option(
 
            '--show',
 
            action='store_true',
 
            dest='show',
 
            help=("show existing cache keys with together with status")
 
        )
 

	
 
        self.parser.add_option(
 
            '--cleanup',
 
            action="store_true",
 
            dest="cleanup",
 
            help="cleanup existing cache keys"
 
        )
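
Note that the --cleanup branch above commits once per deleted key. A variant that batches the deletions into a single commit, using only names already imported in this module, would look like this (a sketch, not part of the changeset):

    _caches = CacheInvalidation.query().order_by(CacheInvalidation.cache_key).all()
    for c_obj in _caches:
        print 'removing key:%s' % c_obj.cache_key
        Session().delete(c_obj)
    Session().commit()  # one commit after all deletions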
kallithea/lib/paster_commands/cleanup.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.paster_commands.cleanup
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
cleanup-repos paster command for Kallithea
 

	
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jul 14, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 

	
 
import os
 
import sys
 
import re
 
import shutil
 
import logging
 
import datetime
 

	
 
from kallithea.lib.utils import BasePasterCommand, ask_ok, REMOVED_REPO_PAT
 
from kallithea.lib.utils2 import safe_str
 
from kallithea.model.db import Ui
 

	
 
# Add location of top level folder to sys.path
 
from os.path import dirname as dn
 
rc_path = dn(dn(dn(os.path.realpath(__file__))))
 
sys.path.append(rc_path)
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class Command(BasePasterCommand):
 

	
 
    max_args = 1
 
    min_args = 1
 

	
 
    usage = "CONFIG_FILE"
 
    group_name = "Kallithea"
 
    takes_config_file = -1
 
    parser = BasePasterCommand.standard_parser(verbose=True)
 
    summary = "Cleanup deleted repos"
 

	
 
    def _parse_older_than(self, val):
 
        regex = re.compile(r'((?P<days>\d+?)d)?((?P<hours>\d+?)h)?((?P<minutes>\d+?)m)?((?P<seconds>\d+?)s)?')
 
        parts = regex.match(val)
 
        if not parts:
 
            return
 
        parts = parts.groupdict()
 
        time_params = {}
 
        for (name, param) in parts.iteritems():
 
            if param:
 
                time_params[name] = int(param)
 
        return datetime.timedelta(**time_params)
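
Worked example for _parse_older_than, doctest-style, with cmd being a Command instance. Since every group in the regex is optional, any string matches, so unparseable input yields timedelta(0) rather than None and the 'if not parts' branch never triggers:

    >>> cmd._parse_older_than('30d')
    datetime.timedelta(30)
    >>> cmd._parse_older_than('1d12h30m')
    datetime.timedelta(1, 45000)    # 1 day, plus 12 h 30 min = 45000 seconds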
 

	
 
    def _extract_date(self, name):
 
        """
 
        Extract the date part from rm__<date> pattern of removed repos,
 
        and convert it to datetime object
 

	
 
        :param name:
 
        """
 
        date_part = name[4:19]  # 4:19 since we don't parse milliseconds
 
        return datetime.datetime.strptime(date_part, '%Y%m%d_%H%M%S')
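
The [4:19] slice covers exactly the fifteen characters of the %Y%m%d_%H%M%S timestamp that follows the 'rm__' prefix; for example (the part after the timestamp is illustrative):

    >>> cmd._extract_date('rm__20150831_174257__myrepo')
    datetime.datetime(2015, 8, 31, 17, 42, 57)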
 

	
 
    def command(self):
 
        #get SqlAlchemy session
 
        self._init_session()
 

	
 
        repos_location = Ui.get_repos_location()
 
        to_remove = []
 
        for dn_, dirs, f in os.walk(safe_str(repos_location)):
 
            alldirs = list(dirs)
 
            del dirs[:]
 
            if ('.hg' in alldirs or
 
                ('objects' in alldirs and ('refs' in alldirs or 'packed-refs' in f))):
 
                continue
 
            for loc in alldirs:
 
                if REMOVED_REPO_PAT.match(loc):
 
                    to_remove.append([os.path.join(dn_, loc),
 
                                      self._extract_date(loc)])
 
                else:
 
                    dirs.append(loc)
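
The walk above relies on os.walk's contract that mutating dirs in place controls recursion: 'del dirs[:]' stops all descent, and re-appending entries resumes it only for those. The same pruning idiom in isolation (path and predicate are illustrative):

    import os

    for root, dirs, files in os.walk('/some/path'):
        kept = [d for d in dirs if not d.startswith('rm__')]
        del dirs[:]        # prune every subdirectory...
        dirs.extend(kept)  # ...then re-add only those we still want to visit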
 

	
 
        #filter older than (if present)!
 
        now = datetime.datetime.now()
 
        older_than = self.options.older_than
 
        if older_than:
 
            to_remove_filtered = []
 
            older_than_date = self._parse_older_than(older_than)
 
            for name, date_ in to_remove:
 
                repo_age = now - date_
 
                if repo_age > older_than_date:
 
                    to_remove_filtered.append([name, date_])
 

	
 
            to_remove = to_remove_filtered
 
            print >> sys.stdout, 'removing %s deleted repos older than %s (%s)' \
 
                % (len(to_remove), older_than, older_than_date)
 
        else:
 
            print >> sys.stdout, 'removing all [%s] deleted repos' \
 
                % len(to_remove)
 
        if self.options.dont_ask or not to_remove:
 
            # don't ask, just remove
 
            remove = True
 
        else:
 
            remove = ask_ok('the following repositories will be deleted completely:\n%s\n'
 
                            'are you sure you want to remove them [y/n]?'
 
                            % ', \n'.join(['%s removed on %s'
 
                    % (safe_str(x[0]), safe_str(x[1])) for x in to_remove]))
 

	
 
        if remove:
 
            for path, date_ in to_remove:
 
                print >> sys.stdout, 'removing repository %s' % path
 
                shutil.rmtree(path)
 
        else:
 
            print 'nothing done, exiting...'
 
            sys.exit(0)
 

	
 
    def update_parser(self):
 
        self.parser.add_option(
 
            '--older-than',
 
            action='store',
 
            dest='older_than',
 
            help=("only remove repos that have been removed "
 
                 "at least given time ago. "
 
                 "The default is to remove all removed repositories. "
 
                 "Possible suffixes: "
 
                 "d (days), h (hours), m (minutes), s (seconds). "
 
                 "For example --older-than=30d deletes repositories "
 
                 "removed more than 30 days ago.")
 
            )
 

	
 
        self.parser.add_option(
 
            '--dont-ask',
 
            action="store_true",
 
            dest="dont_ask",
 
            help="remove repositories without asking for confirmation."
 
        )
kallithea/lib/paster_commands/ishell.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.paster_commands.ishell
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
interactive shell paster command for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 4, 2013
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 

	
 
import os
 
import sys
 
import logging
 

	
 
from kallithea.lib.utils import BasePasterCommand
 

	
 
# Add location of top level folder to sys.path
 
from os.path import dirname as dn
 
rc_path = dn(dn(dn(os.path.realpath(__file__))))
 
sys.path.append(rc_path)
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class Command(BasePasterCommand):
 

	
 
    max_args = 1
 
    min_args = 1
 

	
 
    usage = "CONFIG_FILE"
 
    group_name = "Kallithea"
 
    takes_config_file = -1
 
    parser = BasePasterCommand.standard_parser(verbose=True)
 
    summary = "Interactive shell"
 

	
 
    def command(self):
 
        #get SqlAlchemy session
 
        self._init_session()
 

	
 
        # imports, used in ipython shell
 
        import os
 
        import sys
 
        import time
 
        import shutil
 
        import datetime
 
        from kallithea.model.db import *
 

	
 
        try:
 
            from IPython import embed
 
            from IPython.config.loader import Config
 
            cfg = Config()
 
            cfg.InteractiveShellEmbed.confirm_exit = False
 
            embed(config=cfg, banner1="Kallithea IShell.")
 
        except ImportError:
 
            print 'IPython installation required for ishell'
 
            sys.exit(-1)
 

	
 
    def update_parser(self):
 
        pass
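
The 'from IPython.config.loader import Config' import above matches IPython 0.11 through 3.x; in IPython 4 and later that class lives in traitlets. A hedged compatibility sketch (the fallback is an assumption about newer IPython, not part of this changeset):

    try:
        from IPython.config.loader import Config  # IPython < 4
    except ImportError:
        from traitlets.config import Config       # IPython >= 4
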
kallithea/lib/paster_commands/make_index.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.paster_commands.make_index
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
make-index paster command for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Aug 17, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 

	
 
"""
 

	
 
from __future__ import with_statement
 

	
 
import os
 
import sys
 
import logging
import logging.config
 

	
 
from string import strip
 
from kallithea.model.repo import RepoModel
 
from kallithea.lib.utils import BasePasterCommand, load_rcextensions
 

	
 
# Add location of top level folder to sys.path
 
from os.path import dirname as dn
 
rc_path = dn(dn(dn(os.path.realpath(__file__))))
 
sys.path.append(rc_path)
 

	
 

	
 
class Command(BasePasterCommand):
 

	
 
    max_args = 1
 
    min_args = 1
 

	
 
    usage = "CONFIG_FILE"
 
    group_name = "Kallithea"
 
    takes_config_file = -1
 
    parser = BasePasterCommand.standard_parser(verbose=True)
 
    summary = "Creates or updates full text search index"
 

	
 
    def command(self):
 
        logging.config.fileConfig(self.path_to_ini_file)
 
        #get SqlAlchemy session
 
        self._init_session()
 
        from pylons import config
 
        index_location = config['index_dir']
 
        load_rcextensions(config['here'])
 

	
 
        repo_location = self.options.repo_location \
 
            if self.options.repo_location else RepoModel().repos_path
 
        repo_list = map(strip, self.options.repo_list.split(',')) \
 
            if self.options.repo_list else None
 

	
 
        repo_update_list = map(strip, self.options.repo_update_list.split(',')) \
 
            if self.options.repo_update_list else None
 

	
 
        #======================================================================
 
        # WHOOSH DAEMON
 
        #======================================================================
 
        from kallithea.lib.pidlock import LockHeld, DaemonLock
 
        from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
 
        try:
 
            l = DaemonLock(file_=os.path.join(dn(dn(index_location)),
 
                                              'make_index.lock'))
 
            WhooshIndexingDaemon(index_location=index_location,
 
                                 repo_location=repo_location,
 
                                 repo_list=repo_list,
 
                                 repo_update_list=repo_update_list)\
 
                .run(full_index=self.options.full_index)
 
            l.release()
 
        except LockHeld:
 
            sys.exit(1)
 

	
 
    def update_parser(self):
 
        self.parser.add_option('--repo-location',
 
                          action='store',
 
                          dest='repo_location',
 
                          help="Specifies repositories location to index OPTIONAL",
 
                          )
 
        self.parser.add_option('--index-only',
 
                          action='store',
 
                          dest='repo_list',
 
                          help="Specifies a comma separated list of repositories "
 
                                "to build index on. If not given all repositories "
 
                                "are scanned for indexing. OPTIONAL",
 
                          )
 
        self.parser.add_option('--update-only',
 
                          action='store',
 
                          dest='repo_update_list',
 
                          help="Specifies a comma separated list of repositories "
 
                                "to re-build index on. OPTIONAL",
 
                          )
 
        self.parser.add_option('-f',
 
                          action='store_true',
 
                          dest='full_index',
 
                          help="Specifies that index should be made full i.e"
 
                                " destroy old and build from scratch",
 
                          default=False)
kallithea/lib/paster_commands/make_rcextensions.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.paster_commands.make_rcextensions
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
make-rcext paster command for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Mar 6, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 

	
 
"""
 

	
 
from __future__ import with_statement
 

	
 
import os
 
import sys
 
import pkg_resources
 

	
 
from kallithea.lib.utils import BasePasterCommand, ask_ok
 

	
 
# Add location of top level folder to sys.path
 
from os.path import dirname as dn
 
rc_path = dn(dn(dn(os.path.realpath(__file__))))
 
sys.path.append(rc_path)
 

	
 

	
 
class Command(BasePasterCommand):
 

	
 
    max_args = 1
 
    min_args = 1
 

	
 
    group_name = "Kallithea"
 
    takes_config_file = -1
 
    parser = BasePasterCommand.standard_parser(verbose=True)
 
    summary = "Write template file for extending Kallithea in Python."
 
    usage = "CONFIG_FILE"
 
    description = '''\
 
        An rcextensions directory with an __init__.py file will be created next to
 
        the ini file. Local customizations in that file will survive upgrades.
 
        The file contains instructions on how it can be customized.
 
        '''
 

	
 
    def command(self):
 
        from pylons import config
 

	
 
        here = config['here']
 
        content = pkg_resources.resource_string(
 
            'kallithea', os.path.join('config', 'rcextensions', '__init__.py')
 
        )
 
        ext_file = os.path.join(here, 'rcextensions', '__init__.py')
 
        if os.path.exists(ext_file):
 
            msg = ('Extension file already exists, do you want '
 
                   'to overwrite it? [y/n]')
 
            if not ask_ok(msg):
 
                print 'Nothing done...'
 
                return
 

	
 
        dirname = os.path.dirname(ext_file)
 
        if not os.path.isdir(dirname):
 
            os.makedirs(dirname)
 
        with open(ext_file, 'wb') as f:
 
            f.write(content)
 
            print 'Wrote new extensions file to %s' % ext_file
 

	
 
    def update_parser(self):
 
        pass
kallithea/lib/paster_commands/repo_scan.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.paster_commands.repo_scan
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
repo-scan paster command for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Feb 9, 2013
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 

	
 
import os
 
import sys
 
import logging
 

	
 
from kallithea.model.scm import ScmModel
 
from kallithea.lib.utils import BasePasterCommand, repo2db_mapper
 

	
 
# Add location of top level folder to sys.path
 
from os.path import dirname as dn
 
rc_path = dn(dn(dn(os.path.realpath(__file__))))
 
sys.path.append(rc_path)
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class Command(BasePasterCommand):
 

	
 
    max_args = 1
 
    min_args = 1
 

	
 
    usage = "CONFIG_FILE"
 
    group_name = "Kallithea"
 
    takes_config_file = -1
 
    parser = BasePasterCommand.standard_parser(verbose=True)
 
    summary = "Rescan default location for new repositories"
 

	
 
    def command(self):
 
        #get SqlAlchemy session
 
        self._init_session()
 
        rm_obsolete = self.options.delete_obsolete
 
        log.info('Now scanning root location for new repos...')
 
        added, removed = repo2db_mapper(ScmModel().repo_scan(),
 
                                        remove_obsolete=rm_obsolete)
 
        added = ', '.join(added) or '-'
 
        removed = ', '.join(removed) or '-'
 
        log.info('Scan completed added: %s removed: %s', added, removed)
 

	
 
    def update_parser(self):
 
        self.parser.add_option(
 
            '--delete-obsolete',
 
            action='store_true',
 
            help="Use this flag do delete repositories that are "
 
                 "present in Kallithea database but not on the filesystem",
 
        )
kallithea/lib/paster_commands/update_repoinfo.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.paster_commands.update_repoinfo
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
update-repoinfo paster command for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jul 14, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 

	
 
import os
 
import sys
 
import logging
 
import string
 

	
 
from kallithea.lib.utils import BasePasterCommand
 
from kallithea.model.db import Repository
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.meta import Session
 

	
 
# Add location of top level folder to sys.path
 
from os.path import dirname as dn
 
rc_path = dn(dn(dn(os.path.realpath(__file__))))
 
sys.path.append(rc_path)
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class Command(BasePasterCommand):
 

	
 
    max_args = 1
 
    min_args = 1
 

	
 
    usage = "CONFIG_FILE"
 
    group_name = "Kallithea"
 
    takes_config_file = -1
 
    parser = BasePasterCommand.standard_parser(verbose=True)
 
    summary = "Updates repositories caches for last changeset"
 

	
 
    def command(self):
 
        #get SqlAlchemy session
 
        self._init_session()
 

	
 
        repo_update_list = map(string.strip,
 
                               self.options.repo_update_list.split(',')) \
 
                               if self.options.repo_update_list else None
 

	
 
        if repo_update_list:
 
            repo_list = list(Repository.query()\
 
                .filter(Repository.repo_name.in_(repo_update_list)))
 
        else:
 
            repo_list = Repository.getAll()
 
        RepoModel.update_repoinfo(repositories=repo_list)
 
        Session().commit()
 

	
 
        if self.options.invalidate_cache:
 
            for r in repo_list:
 
                r.set_invalidate()
 
        print 'Updated cache for %s repositories' % (len(repo_list))
 

	
 
    def update_parser(self):
 
        self.parser.add_option('--update-only',
 
                           action='store',
 
                           dest='repo_update_list',
 
                           help="Specifies a comma separated list of repositories "
 
                                "to update last commit info for. OPTIONAL")
 
        self.parser.add_option('--invalidate-cache',
 
                           action='store_true',
 
                           dest='invalidate_cache',
 
                           help="Trigger cache invalidation event for repos. "
 
                                "OPTIONAL")
kallithea/lib/pidlock.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
from __future__ import with_statement
 
import os
 
import errno
 

	
 
from multiprocessing.util import Finalize
 

	
 
from kallithea.lib.compat import kill
 

	
 

	
 
class LockHeld(Exception):
 
    pass
 

	
 

	
 
class DaemonLock(object):
 
    """daemon locking
 
    USAGE:
 
    try:
 
        l = DaemonLock(file_='/path/tolockfile',desc='test lock')
 
        main()
 
        l.release()
 
    except LockHeld:
 
        sys.exit(1)
 
    """
 

	
 
    def __init__(self, file_=None, callbackfn=None,
 
                 desc='daemon lock', debug=False):
 

	
 
        lock_name = os.path.join(os.path.dirname(__file__), 'running.lock')
 
        self.pidfile = file_ if file_ else lock_name
 
        self.callbackfn = callbackfn
 
        self.desc = desc
 
        self.debug = debug
 
        self.held = False
 
        #run the lock automatically !
 
        self.lock()
 
        self._finalize = Finalize(self, DaemonLock._on_finalize,
 
                                  args=(self, debug), exitpriority=10)
 

	
 
    @staticmethod
 
    def _on_finalize(lock, debug):
 
        if lock.held:
 
            if debug:
 
                print 'lock held finalizing and running lock.release()'
 
            lock.release()
 

	
 
    def lock(self):
 
        """
 
        locking function; if the lock is already held, it
 
        will raise a LockHeld exception
 
        """
 
        lockname = str(os.getpid())
 
        if self.debug:
 
            print 'running lock'
 
        self.trylock()
 
        self.makelock(lockname, self.pidfile)
 
        return True
 

	
 
    def trylock(self):
 
        running_pid = False
 
        if self.debug:
 
            print 'checking for already running process'
 
        try:
 
            with open(self.pidfile, 'r') as f:
 
                try:
 
                    running_pid = int(f.readline())
 
                except ValueError:
 
                    running_pid = -1
 

	
 
            if self.debug:
 
                print ('lock file present running_pid: %s, '
 
                       'checking for execution' % (running_pid,))
 
            # Now we check the PID from lock file matches to the current
 
            # process PID
 
            if running_pid:
 
                try:
 
                    kill(running_pid, 0)
 
                except OSError as exc:
 
                    if exc.errno in (errno.ESRCH, errno.EPERM):
 
                        print ("Lock File is there but"
 
                               " the program is not running")
 
                        print "Removing lock file for the: %s" % running_pid
 
                        self.release()
 
                    else:
 
                        raise
 
                else:
 
                    print "You already have an instance of the program running"
 
                    print "It is running as process %s" % running_pid
 
                    raise LockHeld()
 

	
 
        except IOError as e:
 
            if e.errno != 2:
 
                raise
 

	
 
    def release(self):
 
        """releases the pid by removing the pidfile
 
        """
 
        if self.debug:
 
            print 'trying to release the pidlock'
 

	
 
        if self.callbackfn:
 
            #execute callback function on release
 
            if self.debug:
 
                print 'executing callback function %s' % self.callbackfn
 
            self.callbackfn()
 
        try:
 
            if self.debug:
 
                print 'removing pidfile %s' % self.pidfile
 
            os.remove(self.pidfile)
 
            self.held = False
 
        except OSError as e:
 
            if self.debug:
 
                print 'removing pidfile failed %s' % e
 
            pass
 

	
 
    def makelock(self, lockname, pidfile):
 
        """
 
        this function will make an actual lock
 

	
 
        :param lockname: the PID to write into the lock file
 
        :param pidfile: the file to write the pid in
 
        """
 
        if self.debug:
 
            print 'creating a file %s and pid: %s' % (pidfile, lockname)
 

	
 
        dir_, file_ = os.path.split(pidfile)
 
        if not os.path.isdir(dir_):
 
            os.makedirs(dir_)
 
        with open(self.pidfile, 'wb') as f:
 
            f.write(lockname)
 
        self.held = True
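
DaemonLock is released either explicitly, as in the docstring above, or by the multiprocessing Finalize hook at interpreter exit. Where deterministic release is wanted without a try/finally at every call site, a small context-manager wrapper is enough; a sketch (the wrapper is not part of this module):

    from contextlib import contextmanager

    @contextmanager
    def daemon_lock(file_):
        lock = DaemonLock(file_=file_)  # raises LockHeld if already locked
        try:
            yield lock
        finally:
            lock.release()

    # usage, with an illustrative path and work function:
    # with daemon_lock('/tmp/make_index.lock'):
    #     rebuild_index()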
kallithea/lib/profiler.py
Show inline comments
 
from __future__ import with_statement
 

	
 
import objgraph
 
import cProfile
 
import pstats
 
import cgi
 
import pprint
 
import threading
 

	
 
from StringIO import StringIO
 

	
 

	
 
class ProfilingMiddleware(object):
 
    def __init__(self, app):
 
        self.lock = threading.Lock()
 
        self.app = app
 

	
 
    def __call__(self, environ, start_response):
 
        with self.lock:
 
            profiler = cProfile.Profile()
 

	
 
            def run_app(*a, **kw):
 
                self.response = self.app(environ, start_response)
 

	
 
            profiler.runcall(run_app, environ, start_response)
 

	
 
            profiler.snapshot_stats()
 

	
 
            stats = pstats.Stats(profiler)
 
            stats.sort_stats('calls')  # alternatively: 'cumulative'
 

	
 
            # Redirect output
 
            out = StringIO()
 
            stats.stream = out
 

	
 
            stats.print_stats()
 

	
 
            resp = ''.join(self.response)
 

	
 
            # Let's at least only put this on HTML-like responses.
 
            if resp.strip().startswith('<'):
 
                ## The profiling info is just appended to the response.
 
                ##  Browsers don't mind this.
 
                resp += ('<pre style="text-align:left; '
 
                         'border-top: 4px dashed red; padding: 1em;">')
 
                resp += cgi.escape(out.getvalue(), True)
 

	
 
                ct = objgraph.show_most_common_types()
 
                print ct
 

	
 
                resp += ct if ct else '---'
 

	
 
                output = StringIO()
 
                pprint.pprint(environ, output, depth=3)
 

	
 
                resp += cgi.escape(output.getvalue(), True)
 
                resp += '</pre>'
 

	
 
            return resp
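
ProfilingMiddleware serializes all requests through a lock and appends the cProfile output to HTML-like responses, so it is strictly a debugging aid. A sketch of wiring it into a WSGI stack (the app factory name is illustrative):

    from kallithea.lib.profiler import ProfilingMiddleware

    def make_profiled_app():
        app = build_wsgi_app()           # hypothetical factory for the real app
        return ProfilingMiddleware(app)  # profiles every request, one at a time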
kallithea/model/api_key.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.api_key
 
~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
API key model for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Sep 8, 2013
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 
import time
 
import logging
 
from sqlalchemy import or_
 

	
 
from kallithea.lib.utils2 import generate_api_key
 
from kallithea.model import BaseModel
 
from kallithea.model.db import UserApiKeys
 
from kallithea.model.meta import Session
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class ApiKeyModel(BaseModel):
 
    cls = UserApiKeys
 

	
 
    def create(self, user, description, lifetime=-1):
 
        """
 
        :param user: user or user_id
 
        :param description: description of ApiKey
 
        :param lifetime: expiration time in minutes, -1 for no expiration
 
        """
 
        user = self._get_user(user)
 

	
 
        new_api_key = UserApiKeys()
 
        new_api_key.api_key = generate_api_key()
 
        new_api_key.user_id = user.user_id
 
        new_api_key.description = description
 
        new_api_key.expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
 
        Session().add(new_api_key)
 

	
 
        return new_api_key
 

	
 
    def delete(self, api_key, user=None):
 
        """
 
        Deletes given api_key, if user is set it also filters the object for
 
        deletion by given user.
 
        """
 
        api_key = UserApiKeys.query().filter(UserApiKeys.api_key == api_key)
 

	
 
        if user is not None:
 
            user = self._get_user(user)
 
            api_key = api_key.filter(UserApiKeys.user_id == user.user_id)
 

	
 
        api_key = api_key.scalar()
 
        Session().delete(api_key)
 

	
 
    def get_api_keys(self, user, show_expired=True):
 
        user = self._get_user(user)
 
        user_api_keys = UserApiKeys.query()\
 
            .filter(UserApiKeys.user_id == user.user_id)
 
        if not show_expired:
 
            user_api_keys = user_api_keys\
 
                .filter(or_(UserApiKeys.expires == -1,
 
                            UserApiKeys.expires >= time.time()))
 
        return user_api_keys
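
Usage sketch for ApiKeyModel, assuming a User instance and the Session imported in this module; lifetime is interpreted in minutes (it is multiplied by 60 before being added to time.time()):

    model = ApiKeyModel()
    key = model.create(user, u'CI access', lifetime=60)    # expires in one hour
    Session().commit()
    active = model.get_api_keys(user, show_expired=False)  # query of unexpired keys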
kallithea/model/gist.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.gist
 
~~~~~~~~~~~~~~~~~~~~
 

	
 
gist model for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: May 9, 2013
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 
import os
 
import time
 
import logging
 
import traceback
 
import shutil
 

	
 
from kallithea.lib.utils2 import safe_unicode, unique_id, safe_int, \
 
    time_to_datetime, AttributeDict
 
from kallithea.lib.compat import json
 
from kallithea.model import BaseModel
 
from kallithea.model.db import Gist
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.scm import ScmModel
 

	
 
log = logging.getLogger(__name__)
 

	
 
GIST_STORE_LOC = '.rc_gist_store'
 
GIST_METADATA_FILE = '.rc_gist_metadata'
 

	
 

	
 
class GistModel(BaseModel):
 
    cls = Gist
 

	
 
    def _get_gist(self, gist):
 
        """
 
        Helper method to get gist by ID, or gist_access_id as a fallback
 

	
 
        :param gist: GistID, gist_access_id, or Gist instance
 
        """
 
        return self._get_instance(Gist, gist, callback=Gist.get_by_access_id)
 

	
 
    def __delete_gist(self, gist):
 
        """
 
        removes gist from filesystem
 

	
 
        :param gist: gist object
 
        """
 
        root_path = RepoModel().repos_path
 
        rm_path = os.path.join(root_path, GIST_STORE_LOC, gist.gist_access_id)
 
        log.info("Removing %s", rm_path)
 
        shutil.rmtree(rm_path)
 

	
 
    def _store_metadata(self, repo, gist_id, gist_access_id, user_id, gist_type,
 
                        gist_expires):
 
        """
 
        store metadata inside the gist; this can later be used for imports
 
        or gist identification
 
        """
 
        metadata = {
 
            'metadata_version': '1',
 
            'gist_db_id': gist_id,
 
            'gist_access_id': gist_access_id,
 
            'gist_owner_id': user_id,
 
            'gist_type': gist_type,
 
            'gist_expires': gist_expires,
 
            'gist_updated': time.time(),
 
        }
 
        with open(os.path.join(repo.path, '.hg', GIST_METADATA_FILE), 'wb') as f:
 
            f.write(json.dumps(metadata))
 

	
 
    def get_gist(self, gist):
 
        return self._get_gist(gist)
 

	
 
    def get_gist_files(self, gist_access_id, revision=None):
 
        """
 
        Get files for given gist
 

	
 
        :param gist_access_id:
 
        """
 
        repo = Gist.get_by_access_id(gist_access_id)
 
        cs = repo.scm_instance.get_changeset(revision)
 
        return cs, [n for n in cs.get_node('/')]
 

	
 
    def create(self, description, owner, gist_mapping,
 
               gist_type=Gist.GIST_PUBLIC, lifetime=-1):
 
        """
 

	
 
        :param description: description of the gist
 
        :param owner: user who created this gist
 
        :param gist_mapping: mapping {filename:{'content':content},...}
 
        :param gist_type: type of gist private/public
 
        :param lifetime: in minutes, -1 == forever
 
        """
 
        owner = self._get_user(owner)
 
        gist_id = safe_unicode(unique_id(20))
 
        lifetime = safe_int(lifetime, -1)
 
        gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
 
        log.debug('set GIST expiration date to: %s',
 
                  time_to_datetime(gist_expires)
 
                   if gist_expires != -1 else 'forever')
 
        #create the Database version
 
        gist = Gist()
 
        gist.gist_description = description
 
        gist.gist_access_id = gist_id
 
        gist.gist_owner = owner.user_id
 
        gist.gist_expires = gist_expires
 
        gist.gist_type = safe_unicode(gist_type)
 
        self.sa.add(gist)
 
        self.sa.flush()
 
        if gist_type == Gist.GIST_PUBLIC:
 
            # use DB ID for easy to use GIST ID
 
            gist_id = safe_unicode(gist.gist_id)
 
            gist.gist_access_id = gist_id
 
            self.sa.add(gist)
 

	
 
        gist_repo_path = os.path.join(GIST_STORE_LOC, gist_id)
 
        log.debug('Creating new %s GIST repo in %s', gist_type, gist_repo_path)
 
        repo = RepoModel()._create_filesystem_repo(
 
            repo_name=gist_id, repo_type='hg', repo_group=GIST_STORE_LOC)
 

	
 
        processed_mapping = {}
 
        for filename in gist_mapping:
 
            if filename != os.path.basename(filename):
 
                raise Exception('Filename cannot be inside a directory')
 

	
 
            content = gist_mapping[filename]['content']
 
            #TODO: expand support for setting explicit lexers
 
#             if lexer is None:
 
#                 try:
 
#                     guess_lexer = pygments.lexers.guess_lexer_for_filename
 
#                     lexer = guess_lexer(filename,content)
 
#                 except pygments.util.ClassNotFound:
 
#                     lexer = 'text'
 
            processed_mapping[filename] = {'content': content}
 

	
 
        # now create single multifile commit
 
        message = 'added file'
 
        message += 's: ' if len(processed_mapping) > 1 else ': '
 
        message += ', '.join([x for x in processed_mapping])
 

	
 
        #fake Kallithea Repository object
 
        fake_repo = AttributeDict(dict(
 
            repo_name=gist_repo_path,
 
            scm_instance_no_cache=lambda: repo,
 
        ))
 
        ScmModel().create_nodes(
 
            user=owner.user_id, repo=fake_repo,
 
            message=message,
 
            nodes=processed_mapping,
 
            trigger_push_hook=False
 
        )
 

	
 
        self._store_metadata(repo, gist.gist_id, gist.gist_access_id,
 
                             owner.user_id, gist.gist_type, gist.gist_expires)
 
        return gist
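
A sketch of calling create() with the documented mapping shape (owner and file content are illustrative):

    gist = GistModel().create(
        description=u'example snippet',
        owner=user,                 # assumed User instance
        gist_mapping={'hello.py': {'content': 'print "hi"\n'}},
        gist_type=Gist.GIST_PUBLIC,
        lifetime=-1,                # -1 == forever
    )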
 

	
 
    def delete(self, gist, fs_remove=True):
 
        gist = self._get_gist(gist)
 
        try:
 
            self.sa.delete(gist)
 
            if fs_remove:
 
                self.__delete_gist(gist)
 
            else:
 
                log.debug('skipping removal from filesystem')
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def update(self, gist, description, owner, gist_mapping, gist_type,
 
               lifetime):
 
        gist = self._get_gist(gist)
 
        gist_repo = gist.scm_instance
 

	
 
        lifetime = safe_int(lifetime, -1)
 
        if lifetime == 0:  # preserve old value
 
            gist_expires = gist.gist_expires
 
        else:
 
            gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
 

	
 
        #calculate operation type based on given data
 
        gist_mapping_op = {}
 
        for k, v in gist_mapping.items():
 
            # add, mod, del
 
            if not v['org_filename'] and v['filename']:
 
                op = 'add'
 
            elif v['org_filename'] and not v['filename']:
 
                op = 'del'
 
            else:
 
                op = 'mod'
 

	
 
            v['op'] = op
 
            gist_mapping_op[k] = v
 

	
 
        gist.gist_description = description
 
        gist.gist_expires = gist_expires
 
        gist.owner = owner
 
        gist.gist_type = gist_type
 
        self.sa.add(gist)
 
        self.sa.flush()
 

	
 
        message = 'updated file'
 
        message += 's: ' if len(gist_mapping) > 1 else ': '
 
        message += ', '.join([x for x in gist_mapping])
 

	
 
        #fake Kallithea Repository object
 
        fake_repo = AttributeDict(dict(
 
            repo_name=gist_repo.path,
 
            scm_instance_no_cache=lambda: gist_repo,
 
        ))
 

	
 
        self._store_metadata(gist_repo, gist.gist_id, gist.gist_access_id,
 
                             owner.user_id, gist.gist_type, gist.gist_expires)
 

	
 
        ScmModel().update_nodes(
 
            user=owner.user_id,
 
            repo=fake_repo,
 
            message=message,
 
            nodes=gist_mapping_op,
 
            trigger_push_hook=False
 
        )
 

	
 
        return gist
kallithea/model/repo.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.repo
 
~~~~~~~~~~~~~~~~~~~~
 

	
 
Repository model for kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jun 5, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 

	
 
"""
 

	
 
from __future__ import with_statement
 
import os
 
import shutil
 
import logging
 
import traceback
 
from datetime import datetime
 
from sqlalchemy.orm import subqueryload
 

	
 
from kallithea.lib.utils import make_ui
 
from kallithea.lib.vcs.backends import get_backend
 
from kallithea.lib.compat import json
 
from kallithea.lib.utils2 import LazyProperty, safe_str, safe_unicode, \
 
    remove_prefix, obfuscate_url_pw, get_current_authuser
 
from kallithea.lib.caching_query import FromCache
 
from kallithea.lib.hooks import log_delete_repository
 

	
 
from kallithea.model import BaseModel
 
from kallithea.model.db import Repository, UserRepoToPerm, UserGroupRepoToPerm, \
 
    UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission, \
 
    Statistics, UserGroup, Ui, RepoGroup, RepositoryField
 

	
 
from kallithea.lib import helpers as h
 
from kallithea.lib.auth import HasRepoPermissionAny, HasUserGroupPermissionAny
 
from kallithea.lib.exceptions import AttachedForksError
 
from kallithea.model.scm import UserGroupList
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class RepoModel(BaseModel):
 

	
 
    cls = Repository
 
    URL_SEPARATOR = Repository.url_sep()
 

	
 
    def _get_user_group(self, users_group):
 
        return self._get_instance(UserGroup, users_group,
 
                                  callback=UserGroup.get_by_group_name)
 

	
 
    def _get_repo_group(self, repo_group):
 
        return self._get_instance(RepoGroup, repo_group,
 
                                  callback=RepoGroup.get_by_group_name)
 

	
 
    def _create_default_perms(self, repository, private):
 
        # create default permission
 
        default = 'repository.read'
 
        def_user = User.get_default_user()
 
        for p in def_user.user_perms:
 
            if p.permission.permission_name.startswith('repository.'):
 
                default = p.permission.permission_name
 
                break
 

	
 
        default_perm = 'repository.none' if private else default
 

	
 
        repo_to_perm = UserRepoToPerm()
 
        repo_to_perm.permission = Permission.get_by_key(default_perm)
 

	
 
        repo_to_perm.repository = repository
 
        repo_to_perm.user_id = def_user.user_id
 

	
 
        return repo_to_perm
 

	
 
    @LazyProperty
 
    def repos_path(self):
 
        """
 
        Gets the repositories root path from database
 
        """
 

	
 
        q = self.sa.query(Ui).filter(Ui.ui_key == '/').one()
 
        return q.ui_value
 

	
 
    def get(self, repo_id, cache=False):
 
        repo = self.sa.query(Repository) \
 
            .filter(Repository.repo_id == repo_id)
 

	
 
        if cache:
 
            repo = repo.options(FromCache("sql_cache_short",
 
                                          "get_repo_%s" % repo_id))
 
        return repo.scalar()
 

	
 
    def get_repo(self, repository):
 
        return self._get_repo(repository)
 

	
 
    def get_by_repo_name(self, repo_name, cache=False):
 
        repo = self.sa.query(Repository) \
 
            .filter(Repository.repo_name == repo_name)
 

	
 
        if cache:
 
            repo = repo.options(FromCache("sql_cache_short",
 
                                          "get_repo_%s" % repo_name))
 
        return repo.scalar()
 

	
 
    def get_all_user_repos(self, user):
 
        """
 
        Gets all repositories to which the given user has at least read access
 

	
 
        :param user:
 
        """
 
        from kallithea.lib.auth import AuthUser
 
        user = self._get_user(user)
 
        repos = AuthUser(user_id=user.user_id).permissions['repositories']
 
        access_check = lambda r: r[1] in ['repository.read',
 
                                          'repository.write',
 
                                          'repository.admin']
 
        repos = [x[0] for x in filter(access_check, repos.items())]
 
        return Repository.query().filter(Repository.repo_name.in_(repos))
 

	
 
    def get_users_js(self):
 
        users = self.sa.query(User).filter(User.active == True).all()
 
        return json.dumps([
 
            {
 
                'id': u.user_id,
 
                'fname': h.escape(u.name),
 
                'lname': h.escape(u.lastname),
 
                'nname': u.username,
 
                'gravatar_lnk': h.gravatar_url(u.email, size=28),
 
                'gravatar_size': 14,
 
            } for u in users]
 
        )
 

	
 
    def get_user_groups_js(self):
 
        user_groups = self.sa.query(UserGroup) \
 
            .filter(UserGroup.users_group_active == True) \
 
            .options(subqueryload(UserGroup.members)) \
 
            .all()
 
        user_groups = UserGroupList(user_groups, perm_set=['usergroup.read',
 
                                                           'usergroup.write',
 
                                                           'usergroup.admin'])
 
        return json.dumps([
 
            {
 
                'id': gr.users_group_id,
 
                'grname': gr.users_group_name,
 
                'grmembers': len(gr.members),
 
            } for gr in user_groups]
 
        )
 

	
 
    @classmethod
 
    def _render_datatable(cls, tmpl, *args, **kwargs):
 
        import kallithea
 
        from pylons import tmpl_context as c
 
        from pylons.i18n.translation import _
 

	
 
        _tmpl_lookup = kallithea.CONFIG['pylons.app_globals'].mako_lookup
 
        template = _tmpl_lookup.get_template('data_table/_dt_elements.html')
 

	
 
        tmpl = template.get_def(tmpl)
 
        kwargs.update(dict(_=_, h=h, c=c))
 
        return tmpl.render(*args, **kwargs)
 

	
 
    @classmethod
 
    def update_repoinfo(cls, repositories=None):
 
        if not repositories:
 
            repositories = Repository.getAll()
 
        for repo in repositories:
 
            repo.update_changeset_cache()
 

	
 
    def get_repos_as_dict(self, repos_list=None, admin=False, perm_check=True,
 
                          super_user_actions=False):
 
        _render = self._render_datatable
 
        from pylons import tmpl_context as c
 

	
 
        def quick_menu(repo_name):
 
            return _render('quick_menu', repo_name)
 

	
 
        def repo_lnk(name, rtype, rstate, private, fork_of):
 
            return _render('repo_name', name, rtype, rstate, private, fork_of,
 
                           short_name=not admin, admin=False)
 

	
 
        def last_change(last_change):
 
            return _render("last_change", last_change)
 

	
 
        def rss_lnk(repo_name):
 
            return _render("rss", repo_name)
 

	
 
        def atom_lnk(repo_name):
 
            return _render("atom", repo_name)
 

	
 
        def last_rev(repo_name, cs_cache):
 
            return _render('revision', repo_name, cs_cache.get('revision'),
 
                           cs_cache.get('raw_id'), cs_cache.get('author'),
 
                           cs_cache.get('message'))
 

	
 
        def desc(desc):
 
            return h.urlify_text(desc, truncate=60, stylize=c.visual.stylify_metatags)
 

	
 
        def state(repo_state):
 
            return _render("repo_state", repo_state)
 

	
 
        def repo_actions(repo_name):
 
            return _render('repo_actions', repo_name, super_user_actions)
 

	
 
        def owner_actions(user_id, username):
 
            return _render('user_name', user_id, username)
 

	
 
        repos_data = []
 
        for repo in repos_list:
 
            if perm_check:
 
                # check permission at this level
 
                if not HasRepoPermissionAny(
 
                        'repository.read', 'repository.write',
 
                        'repository.admin'
 
                )(repo.repo_name, 'get_repos_as_dict check'):
 
                    continue
 
            cs_cache = repo.changeset_cache
 
            row = {
 
                "menu": quick_menu(repo.repo_name),
 
                "raw_name": repo.repo_name.lower(),
 
                "name": repo_lnk(repo.repo_name, repo.repo_type,
 
                                 repo.repo_state, repo.private, repo.fork),
 
                "last_change": last_change(repo.last_db_change),
 
                "last_changeset": last_rev(repo.repo_name, cs_cache),
 
                "last_rev_raw": cs_cache.get('revision'),
 
                "desc": desc(repo.description),
 
                "owner": h.person(repo.user),
 
                "state": state(repo.repo_state),
 
                "rss": rss_lnk(repo.repo_name),
 
                "atom": atom_lnk(repo.repo_name),
 

	
 
            }
 
            if admin:
 
                row.update({
 
                    "action": repo_actions(repo.repo_name),
 
                    "owner": owner_actions(repo.user.user_id,
 
                                           h.person(repo.user))
 
                })
 
            repos_data.append(row)
 

	
 
        return {
 
            "totalRecords": len(repos_list),
 
            "startIndex": 0,
 
            "sort": "name",
 
            "dir": "asc",
 
            "records": repos_data
 
        }
 

	
 
    def _get_defaults(self, repo_name):
 
        """
 
        Gets information about a repository and returns a dict for
 
        use in forms
 

	
 
        :param repo_name:
 
        """
 

	
 
        repo_info = Repository.get_by_repo_name(repo_name)
 

	
 
        if repo_info is None:
 
            return None
 

	
 
        defaults = repo_info.get_dict()
 
        group, repo_name, repo_name_full = repo_info.groups_and_repo
 
        defaults['repo_name'] = repo_name
 
        defaults['repo_group'] = getattr(group[-1] if group else None,
 
                                         'group_id', None)
 

	
 
        for strip, k in [(0, 'repo_type'), (1, 'repo_enable_downloads'),
 
                         (1, 'repo_description'), (1, 'repo_enable_locking'),
 
                         (1, 'repo_landing_rev'), (0, 'clone_uri'),
 
                         (1, 'repo_private'), (1, 'repo_enable_statistics')]:
 
            attr = k
 
            if strip:
 
                attr = remove_prefix(k, 'repo_')
 

	
 
            val = defaults[attr]
 
            if k == 'repo_landing_rev':
 
                val = ':'.join(defaults[attr])
 
            defaults[k] = val
 
            if k == 'clone_uri':
 
                defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
 

	
 
        # fill owner
 
        if repo_info.user:
 
            defaults.update({'user': repo_info.user.username})
 
        else:
 
            replacement_user = User.query().filter(User.admin ==
 
                                                   True).first().username
 
            defaults.update({'user': replacement_user})
 

	
 
        # fill repository users
 
        for p in repo_info.repo_to_perm:
 
            defaults.update({'u_perm_%s' % p.user.username:
 
                                 p.permission.permission_name})
 

	
 
        # fill repository groups
 
        for p in repo_info.users_group_to_perm:
 
            defaults.update({'g_perm_%s' % p.users_group.users_group_name:
 
                                 p.permission.permission_name})
 

	
 
        return defaults
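
For the entries above with a strip flag of 1, the form key 'repo_<attr>' is mapped back onto the plain attribute via remove_prefix, as its use here implies; doctest-style:

    >>> remove_prefix('repo_description', 'repo_')
    'description'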
 

	
 
    def update(self, repo, **kwargs):
 
        try:
 
            cur_repo = self._get_repo(repo)
 
            org_repo_name = cur_repo.repo_name
 
            if 'user' in kwargs:
 
                cur_repo.user = User.get_by_username(kwargs['user'])
 

	
 
            if 'repo_group' in kwargs:
 
                cur_repo.group = RepoGroup.get(kwargs['repo_group'])
 
            log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
 
            for k in ['repo_enable_downloads',
 
                      'repo_description',
 
                      'repo_enable_locking',
 
                      'repo_landing_rev',
 
                      'repo_private',
 
                      'repo_enable_statistics',
 
                      ]:
 
                if k in kwargs:
 
                    setattr(cur_repo, remove_prefix(k, 'repo_'), kwargs[k])
 
            clone_uri = kwargs.get('clone_uri')
 
            if clone_uri is not None and clone_uri != cur_repo.clone_uri_hidden:
 
                cur_repo.clone_uri = clone_uri
 

	
 
            new_name = cur_repo.get_new_name(kwargs['repo_name'])
 
            cur_repo.repo_name = new_name
 
            #if private flag is set, reset default permission to NONE
 

	
 
            if kwargs.get('repo_private'):
 
                EMPTY_PERM = 'repository.none'
 
                RepoModel().grant_user_permission(
 
                    repo=cur_repo, user='default', perm=EMPTY_PERM
 
                )
 
            # handle extra fields
 
            for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
 
                                kwargs):
 
                k = RepositoryField.un_prefix_key(field)
 
                ex_field = RepositoryField.get_by_key_name(key=k, repo=cur_repo)
 
                if ex_field:
 
                    ex_field.field_value = kwargs[field]
 
                    self.sa.add(ex_field)
 
            self.sa.add(cur_repo)
 

	
 
            if org_repo_name != new_name:
 
                # rename repository
 
                self._rename_filesystem_repo(old=org_repo_name, new=new_name)
 

	
 
            return cur_repo
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def _create_repo(self, repo_name, repo_type, description, owner,
 
                     private=False, clone_uri=None, repo_group=None,
 
                     landing_rev='rev:tip', fork_of=None,
 
                     copy_fork_permissions=False, enable_statistics=False,
 
                     enable_locking=False, enable_downloads=False,
 
                     copy_group_permissions=False, state=Repository.STATE_PENDING):
 
        """
 
        Create repository inside database with PENDING state. This should only be
 
        executed by the create() method, with the exception of importing existing repos.
 

	
 
        """
 
        from kallithea.model.scm import ScmModel
 

	
 
        owner = self._get_user(owner)
 
        fork_of = self._get_repo(fork_of)
 
        repo_group = self._get_repo_group(repo_group)
 
        try:
 
            repo_name = safe_unicode(repo_name)
 
            description = safe_unicode(description)
 
            # repo_name is just the name of the repository,
 
            # while repo_name_full is the fully qualified name, combining
 
            # the group path with the repository name
 
            repo_name_full = repo_name
 
            repo_name = repo_name.split(self.URL_SEPARATOR)[-1]
 

	
 
            new_repo = Repository()
 
            new_repo.repo_state = state
 
            new_repo.enable_statistics = False
 
            new_repo.repo_name = repo_name_full
 
            new_repo.repo_type = repo_type
 
            new_repo.user = owner
 
            new_repo.group = repo_group
 
            new_repo.description = description or repo_name
 
            new_repo.private = private
 
            new_repo.clone_uri = clone_uri
 
            new_repo.landing_rev = landing_rev
 

	
 
            new_repo.enable_statistics = enable_statistics
 
            new_repo.enable_locking = enable_locking
 
            new_repo.enable_downloads = enable_downloads
 

	
 
            if repo_group:
 
                new_repo.enable_locking = repo_group.enable_locking
 

	
 
            if fork_of:
 
                parent_repo = fork_of
 
                new_repo.fork = parent_repo
 

	
 
            self.sa.add(new_repo)
 

	
 
            if fork_of and copy_fork_permissions:
 
                repo = fork_of
 
                user_perms = UserRepoToPerm.query() \
 
                    .filter(UserRepoToPerm.repository == repo).all()
 
                group_perms = UserGroupRepoToPerm.query() \
 
                    .filter(UserGroupRepoToPerm.repository == repo).all()
 

	
 
                for perm in user_perms:
 
                    UserRepoToPerm.create(perm.user, new_repo, perm.permission)
 

	
 
                for perm in group_perms:
 
                    UserGroupRepoToPerm.create(perm.users_group, new_repo,
 
                                               perm.permission)
 

	
 
            elif repo_group and copy_group_permissions:
 

	
 
                user_perms = UserRepoGroupToPerm.query() \
 
                    .filter(UserRepoGroupToPerm.group == repo_group).all()
 

	
 
                group_perms = UserGroupRepoGroupToPerm.query() \
 
                    .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
 

	
 
                for perm in user_perms:
 
                    perm_name = perm.permission.permission_name.replace('group.', 'repository.')
 
                    perm_obj = Permission.get_by_key(perm_name)
 
                    UserRepoToPerm.create(perm.user, new_repo, perm_obj)
 

	
 
                for perm in group_perms:
 
                    perm_name = perm.permission.permission_name.replace('group.', 'repository.')
 
                    perm_obj = Permission.get_by_key(perm_name)
 
                    UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
 

	
 
            else:
 
                perm_obj = self._create_default_perms(new_repo, private)
 
                self.sa.add(perm_obj)
 

	
 
            # now automatically start following this repository as owner
 
            ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
 
                                                    owner.user_id)
 
            # we need to flush here to check that the database won't
 
            # throw any exceptions; filesystem dirs are created at the very end
 
            self.sa.flush()
 
            return new_repo
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def create(self, form_data, cur_user):
 
        """
 
        Create repository using celery tasks
 

	
 
        :param form_data:
 
        :param cur_user:
 
        """
 
        from kallithea.lib.celerylib import tasks, run_task
 
        return run_task(tasks.create_repo, form_data, cur_user)
 

	
 
    def _update_permissions(self, repo, perms_new=None, perms_updates=None,
 
                            check_perms=True):
 
        if not perms_new:
 
            perms_new = []
 
        if not perms_updates:
 
            perms_updates = []
 

	
 
        # update permissions
 
        for member, perm, member_type in perms_updates:
 
            if member_type == 'user':
 
                # this updates existing one
 
                self.grant_user_permission(
 
                    repo=repo, user=member, perm=perm
 
                )
 
            else:
 
                # check if we have permissions to alter this user group
 
                req_perms = (
 
                    'usergroup.read', 'usergroup.write', 'usergroup.admin')
 
                if not check_perms or HasUserGroupPermissionAny(*req_perms)(
 
                        member):
 
                    self.grant_user_group_permission(
 
                        repo=repo, group_name=member, perm=perm
 
                    )
 
        # set new permissions
 
        for member, perm, member_type in perms_new:
 
            if member_type == 'user':
 
                self.grant_user_permission(
 
                    repo=repo, user=member, perm=perm
 
                )
 
            else:
 
                # check if we have permissions to alter this user group
 
                req_perms = (
 
                    'usergroup.read', 'usergroup.write', 'usergroup.admin')
 
                if not check_perms or HasUserGroupPermissionAny(*req_perms)(
 
                        member):
 
                    self.grant_user_group_permission(
 
                        repo=repo, group_name=member, perm=perm
 
                    )
 

	
 
    def create_fork(self, form_data, cur_user):
 
        """
 
        Simple wrapper into executing celery task for fork creation
 

	
 
        :param form_data:
 
        :param cur_user:
 
        """
 
        from kallithea.lib.celerylib import tasks, run_task
 
        return run_task(tasks.create_repo_fork, form_data, cur_user)
 

	
 
    def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
 
        """
 
        Delete given repository; the forks parameter defines what to do with
 
        attached forks. Throws AttachedForksError if deleted repo has attached
 
        forks
 

	
 
        :param repo:
 
        :param forks: str 'delete' or 'detach'
 
        :param fs_remove: remove(archive) repo from filesystem
 
        """
 
        if not cur_user:
 
            cur_user = getattr(get_current_authuser(), 'username', None)
 
        repo = self._get_repo(repo)
 
        if repo is not None:
 
            if forks == 'detach':
 
                for r in repo.forks:
 
                    r.fork = None
 
                    self.sa.add(r)
 
            elif forks == 'delete':
 
                for r in repo.forks:
 
                    self.delete(r, forks='delete')
 
            elif [f for f in repo.forks]:
 
                raise AttachedForksError()
 

	
 
            old_repo_dict = repo.get_dict()
 
            try:
 
                self.sa.delete(repo)
 
                if fs_remove:
 
                    self._delete_filesystem_repo(repo)
 
                else:
 
                    log.debug('skipping removal from filesystem')
 
                log_delete_repository(old_repo_dict,
 
                                      deleted_by=cur_user)
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                raise
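
For illustration, a minimal usage sketch of the forks handling (the repo name
is hypothetical; the model class is RepoModel, as the grant call above shows):

    # detach all forks, then delete the repository itself
    RepoModel().delete('my-repo', forks='detach')
    # or cascade the deletion to every attached fork
    RepoModel().delete('my-repo', forks='delete')
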
 

	
 
    def grant_user_permission(self, repo, user, perm):
 
        """
 
        Grant permission for user on given repository, or update existing one
 
        if found
 

	
 
        :param repo: Instance of Repository, repository_id, or repository name
 
        :param user: Instance of User, user_id or username
 
        :param perm: Instance of Permission, or permission_name
 
        """
 
        user = self._get_user(user)
 
        repo = self._get_repo(repo)
 
        permission = self._get_perm(perm)
 

	
 
        # check if we have that permission already
 
        obj = self.sa.query(UserRepoToPerm) \
 
            .filter(UserRepoToPerm.user == user) \
 
            .filter(UserRepoToPerm.repository == repo) \
 
            .scalar()
 
        if obj is None:
 
            # create new !
 
            obj = UserRepoToPerm()
 
        obj.repository = repo
 
        obj.user = user
 
        obj.permission = permission
 
        self.sa.add(obj)
 
        log.debug('Granted perm %s to %s on %s', perm, user, repo)
 
        return obj
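
A short usage sketch (repo and user names are hypothetical); this is the same
call pattern update() uses above to reset the default permission:

    RepoModel().grant_user_permission(
        repo='my-repo', user='bob', perm='repository.write')
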
 

	
 
    def revoke_user_permission(self, repo, user):
 
        """
 
        Revoke permission for user on given repository
 

	
 
        :param repo: Instance of Repository, repository_id, or repository name
 
        :param user: Instance of User, user_id or username
 
        """
 

	
 
        user = self._get_user(user)
 
        repo = self._get_repo(repo)
 

	
 
        obj = self.sa.query(UserRepoToPerm) \
 
            .filter(UserRepoToPerm.repository == repo) \
 
            .filter(UserRepoToPerm.user == user) \
 
            .scalar()
 
        if obj is not None:
 
            self.sa.delete(obj)
 
            log.debug('Revoked perm for %s on %s', user, repo)
 

	
 
    def grant_user_group_permission(self, repo, group_name, perm):
 
        """
 
        Grant permission for user group on given repository, or update
 
        existing one if found
 

	
 
        :param repo: Instance of Repository, repository_id, or repository name
 
        :param group_name: Instance of UserGroup, users_group_id,
 
            or user group name
 
        :param perm: Instance of Permission, or permission_name
 
        """
 
        repo = self._get_repo(repo)
 
        group_name = self._get_user_group(group_name)
 
        permission = self._get_perm(perm)
 

	
 
        # check if we have that permission already
 
        obj = self.sa.query(UserGroupRepoToPerm) \
 
            .filter(UserGroupRepoToPerm.users_group == group_name) \
 
            .filter(UserGroupRepoToPerm.repository == repo) \
 
            .scalar()
 

	
 
        if obj is None:
 
            # create new
 
            obj = UserGroupRepoToPerm()
 

	
 
        obj.repository = repo
 
        obj.users_group = group_name
 
        obj.permission = permission
 
        self.sa.add(obj)
 
        log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
 
        return obj
 

	
 
    def revoke_user_group_permission(self, repo, group_name):
 
        """
 
        Revoke permission for user group on given repository
 

	
 
        :param repo: Instance of Repository, repository_id, or repository name
 
        :param group_name: Instance of UserGroup, users_group_id,
 
            or user group name
 
        """
 
        repo = self._get_repo(repo)
 
        group_name = self._get_user_group(group_name)
 

	
 
        obj = self.sa.query(UserGroupRepoToPerm) \
 
            .filter(UserGroupRepoToPerm.repository == repo) \
 
            .filter(UserGroupRepoToPerm.users_group == group_name) \
 
            .scalar()
 
        if obj is not None:
 
            self.sa.delete(obj)
 
            log.debug('Revoked perm for %s on %s', group_name, repo)
 

	
 
    def delete_stats(self, repo_name):
 
        """
 
        removes stats for given repo
 

	
 
        :param repo_name:
 
        """
 
        repo = self._get_repo(repo_name)
 
        try:
 
            obj = self.sa.query(Statistics) \
 
                .filter(Statistics.repository == repo).scalar()
 
            if obj is not None:
 
                self.sa.delete(obj)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
 
                                clone_uri=None, repo_store_location=None):
 
        """
 
        Creates a repository on the filesystem. The operation is group aware, meaning that it will create
 
        a repository within a group, and alter the paths according to the group location.
 

	
 
        :param repo_name:
 
        :param repo_type:
 
        :param repo_group:
 
        :param clone_uri:
 
        :param repo_store_location:
 
        """
 
        from kallithea.lib.utils import is_valid_repo, is_valid_repo_group
 
        from kallithea.model.scm import ScmModel
 

	
 
        if '/' in repo_name:
 
            raise ValueError('repo_name must not contain groups, got `%s`' % repo_name)
 

	
 
        if isinstance(repo_group, RepoGroup):
 
            new_parent_path = os.sep.join(repo_group.full_path_splitted)
 
        else:
 
            new_parent_path = repo_group or ''
 

	
 
        if repo_store_location:
 
            _paths = [repo_store_location]
 
        else:
 
            _paths = [self.repos_path, new_parent_path, repo_name]
 
        # we need to make it a str for mercurial
 
        repo_path = os.path.join(*map(safe_str, _paths))
 

	
 
        # check that this path is not already a repository
 
        if is_valid_repo(repo_path, self.repos_path):
 
            raise Exception('This path %s is a valid repository' % repo_path)
 

	
 
        # check that this path is not already a repo group
 
        if is_valid_repo_group(repo_path, self.repos_path):
 
            raise Exception('This path %s is a valid group' % repo_path)
 

	
 
        log.info('creating repo %s in %s from url: `%s`',
 
            repo_name, safe_unicode(repo_path),
 
            obfuscate_url_pw(clone_uri))
 

	
 
        backend = get_backend(repo_type)
 

	
 
        if repo_type == 'hg':
 
            baseui = make_ui('db', clear_session=False)
 
            # patch and reset hooks section of UI config to not run any
 
            # hooks on creating remote repo
 
            for k, v in baseui.configitems('hooks'):
 
                baseui.setconfig('hooks', k, None)
 

	
 
            repo = backend(repo_path, create=True, src_url=clone_uri, baseui=baseui)
 
        elif repo_type == 'git':
 
            repo = backend(repo_path, create=True, src_url=clone_uri, bare=True)
 
            # add kallithea hook into this repo
 
            ScmModel().install_git_hooks(repo=repo)
 
        else:
 
            raise Exception('Unsupported repo_type %s, expected hg/git' % repo_type)
 

	
 
        log.debug('Created repo %s with %s backend',
 
                  safe_unicode(repo_name), safe_unicode(repo_type))
 
        return repo
 

	
 
    def _rename_filesystem_repo(self, old, new):
 
        """
 
        renames repository on filesystem
 

	
 
        :param old: old name
 
        :param new: new name
 
        """
 
        log.info('renaming repo from %s to %s', old, new)
 

	
 
        old_path = os.path.join(self.repos_path, old)
 
        new_path = os.path.join(self.repos_path, new)
 
        if os.path.isdir(new_path):
 
            raise Exception(
 
                'Was trying to rename to already existing dir %s' % new_path
 
            )
 
        shutil.move(old_path, new_path)
 

	
 
    def _delete_filesystem_repo(self, repo):
 
        """
 
        removes repo from filesystem, the removal is actually done by
 
        renaming the dir with an 'rm__*' prefix, which Kallithea will skip.
 
        It can be undeleted later by reverting the rename.
 

	
 
        :param repo: repo object
 
        """
 
        rm_path = os.path.join(self.repos_path, repo.repo_name)
 
        log.info("Removing %s", rm_path)
 

	
 
        _now = datetime.now()
 
        _ms = str(_now.microsecond).rjust(6, '0')
 
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
 
                             repo.just_name)
 
        if repo.group:
 
            args = repo.group.full_path_splitted + [_d]
 
            _d = os.path.join(*args)
 
        shutil.move(rm_path, os.path.join(self.repos_path, _d))
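
As a worked example of the naming scheme above (timestamp and microseconds are
hypothetical), deleting repo 'my-repo' inside group 'my-group' would move it to:

    my-group/rm__20150831_174257_000042__my-repo
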
kallithea/model/scm.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.scm
 
~~~~~~~~~~~~~~~~~~~
 

	
 
Scm model for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 9, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 
import os
 
import re
 
import time
 
import traceback
 
import logging
 
import cStringIO
 
import pkg_resources
 
from os.path import join as jn
 

	
 
from sqlalchemy import func
 
from pylons.i18n.translation import _
 

	
 
import kallithea
 
from kallithea.lib.vcs import get_backend
 
from kallithea.lib.vcs.exceptions import RepositoryError
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 

	
 
from kallithea import BACKENDS
 
from kallithea.lib import helpers as h
 
from kallithea.lib.utils2 import safe_str, safe_unicode, get_server_url,\
 
    _set_extras
 
from kallithea.lib.auth import HasRepoPermissionAny, HasRepoGroupPermissionAny,\
 
    HasUserGroupPermissionAny, HasPermissionAny, HasPermissionAll
 
from kallithea.lib.utils import get_filesystem_repos, make_ui, \
 
    action_logger
 
from kallithea.model import BaseModel
 
from kallithea.model.db import Repository, Ui, CacheInvalidation, \
 
    UserFollowing, UserLog, User, RepoGroup, PullRequest
 
from kallithea.lib.hooks import log_push_action
 
from kallithea.lib.exceptions import NonRelativePathError, IMCCommitError
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class UserTemp(object):
 
    def __init__(self, user_id):
 
        self.user_id = user_id
 

	
 
    def __repr__(self):
 
        return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
 

	
 

	
 
class RepoTemp(object):
 
    def __init__(self, repo_id):
 
        self.repo_id = repo_id
 

	
 
    def __repr__(self):
 
        return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
 

	
 

	
 
class CachedRepoList(object):
 
    """
 
    Cached repo list. Uses super-fast in-memory cache after initialization.
 
    """
 

	
 
    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
 
        self.db_repo_list = db_repo_list
 
        self.repos_path = repos_path
 
        self.order_by = order_by
 
        self.reversed = (order_by or '').startswith('-')
 
        if not perm_set:
 
            perm_set = ['repository.read', 'repository.write',
 
                        'repository.admin']
 
        self.perm_set = perm_set
 

	
 
    def __len__(self):
 
        return len(self.db_repo_list)
 

	
 
    def __repr__(self):
 
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
 

	
 
    def __iter__(self):
 
        # pre-fetch valid_cache_keys once, to save executing select statements
 
        # for each repo
 
        valid_cache_keys = CacheInvalidation.get_valid_cache_keys()
 

	
 
        for dbr in self.db_repo_list:
 
            scmr = dbr.scm_instance_cached(valid_cache_keys)
 
            # check permission at this level
 
            if not HasRepoPermissionAny(
 
                *self.perm_set)(dbr.repo_name, 'get repo check'):
 
                continue
 

	
 
            try:
 
                last_change = scmr.last_change
 
                tip = h.get_changeset_safe(scmr, 'tip')
 
            except Exception:
 
                log.error(
 
                    '%s this repository is present in database but it '
 
                    'cannot be created as an scm instance, org_exc:%s'
 
                    % (dbr.repo_name, traceback.format_exc())
 
                )
 
                continue
 

	
 
            tmp_d = {}
 
            tmp_d['name'] = dbr.repo_name
 
            tmp_d['name_sort'] = tmp_d['name'].lower()
 
            tmp_d['raw_name'] = tmp_d['name'].lower()
 
            tmp_d['description'] = dbr.description
 
            tmp_d['description_sort'] = tmp_d['description'].lower()
 
            tmp_d['last_change'] = last_change
 
            tmp_d['last_change_sort'] = time.mktime(last_change.timetuple())
 
            tmp_d['tip'] = tip.raw_id
 
            tmp_d['tip_sort'] = tip.revision
 
            tmp_d['rev'] = tip.revision
 
            tmp_d['contact'] = dbr.user.full_contact
 
            tmp_d['contact_sort'] = tmp_d['contact']
 
            tmp_d['owner_sort'] = tmp_d['contact']
 
            tmp_d['repo_archives'] = list(scmr._get_archives())
 
            tmp_d['last_msg'] = tip.message
 
            tmp_d['author'] = tip.author
 
            tmp_d['dbrepo'] = dbr.get_dict()
 
            tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
 
            yield tmp_d
 

	
 

	
 
class SimpleCachedRepoList(CachedRepoList):
 
    """
 
    Lighter version of CachedRepoList without the scm initialisation
 
    """
 

	
 
    def __iter__(self):
 
        for dbr in self.db_repo_list:
 
            # check permission at this level
 
            if not HasRepoPermissionAny(
 
                *self.perm_set)(dbr.repo_name, 'get repo check'):
 
                continue
 

	
 
            tmp_d = {
 
                'name': dbr.repo_name,
 
                'dbrepo': dbr.get_dict(),
 
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
 
            }
 
            yield tmp_d
 

	
 

	
 
class _PermCheckIterator(object):
 
    def __init__(self, obj_list, obj_attr, perm_set, perm_checker, extra_kwargs=None):
 
        """
 
        Creates iterator from given list of objects, additionally
 
        checking permissions for them against the given perm_set
 

	
 
        :param obj_list: list of db objects
 
        :param obj_attr: attribute of object to pass into perm_checker
 
        :param perm_set: list of permissions to check
 
        :param perm_checker: callable to check permissions against
 
        :param extra_kwargs: extra kwargs passed on to the perm_checker call
 
        """
 
        self.obj_list = obj_list
 
        self.obj_attr = obj_attr
 
        self.perm_set = perm_set
 
        self.perm_checker = perm_checker
 
        self.extra_kwargs = extra_kwargs or {}
 

	
 
    def __len__(self):
 
        return len(self.obj_list)
 

	
 
    def __repr__(self):
 
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
 

	
 
    def __iter__(self):
 
        for db_obj in self.obj_list:
 
            # check permission at this level
 
            name = getattr(db_obj, self.obj_attr, None)
 
            if not self.perm_checker(*self.perm_set)(
 
                    name, self.__class__.__name__, **self.extra_kwargs):
 
                continue
 

	
 
            yield db_obj
 

	
 

	
 
class RepoList(_PermCheckIterator):
 

	
 
    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
 
        if not perm_set:
 
            perm_set = ['repository.read', 'repository.write', 'repository.admin']
 

	
 
        super(RepoList, self).__init__(obj_list=db_repo_list,
 
                    obj_attr='repo_name', perm_set=perm_set,
 
                    perm_checker=HasRepoPermissionAny,
 
                    extra_kwargs=extra_kwargs)
 

	
 

	
 
class RepoGroupList(_PermCheckIterator):
 

	
 
    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
 
        if not perm_set:
 
            perm_set = ['group.read', 'group.write', 'group.admin']
 

	
 
        super(RepoGroupList, self).__init__(obj_list=db_repo_group_list,
 
                    obj_attr='group_name', perm_set=perm_set,
 
                    perm_checker=HasRepoGroupPermissionAny,
 
                    extra_kwargs=extra_kwargs)
 

	
 

	
 
class UserGroupList(_PermCheckIterator):
 

	
 
    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
 
        if not perm_set:
 
            perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
 

	
 
        super(UserGroupList, self).__init__(obj_list=db_user_group_list,
 
                    obj_attr='users_group_name', perm_set=perm_set,
 
                    perm_checker=HasUserGroupPermissionAny,
 
                    extra_kwargs=extra_kwargs)
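
These three subclasses only differ in the object attribute and the permission
checker. A minimal sketch of their use (the query is illustrative): iterating
yields only the objects the current user may access.

    visible = [r.repo_name for r in RepoList(Repository.query().all())]
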
 

	
 

	
 
class ScmModel(BaseModel):
 
    """
 
    Generic Scm Model
 
    """
 

	
 
    def __get_repo(self, instance):
 
        cls = Repository
 
        if isinstance(instance, cls):
 
            return instance
 
        elif isinstance(instance, int) or safe_str(instance).isdigit():
 
            return cls.get(instance)
 
        elif isinstance(instance, basestring):
 
            return cls.get_by_repo_name(instance)
 
        elif instance is not None:
 
            raise Exception('given object must be int, basestring or Instance'
 
                            ' of %s, got %s' % (cls, type(instance)))
 

	
 
    @LazyProperty
 
    def repos_path(self):
 
        """
 
        Gets the repositories root path from database
 
        """
 

	
 
        q = self.sa.query(Ui).filter(Ui.ui_key == '/').one()
 

	
 
        return q.ui_value
 

	
 
    def repo_scan(self, repos_path=None):
 
        """
 
        Listing of repositories in given path. This path should not be a
 
        repository itself. Returns a dictionary of repository objects keyed by name.
 

	
 
        :param repos_path: path to directory containing repositories
 
        """
 

	
 
        if repos_path is None:
 
            repos_path = self.repos_path
 

	
 
        log.info('scanning for repositories in %s', repos_path)
 

	
 
        baseui = make_ui('db')
 
        repos = {}
 

	
 
        for name, path in get_filesystem_repos(repos_path, recursive=True):
 
            # names need to be decomposed and put back together using '/',
 
            # since this is the internal storage separator for kallithea
 
            name = Repository.normalize_repo_name(name)
 

	
 
            try:
 
                if name in repos:
 
                    raise RepositoryError('Duplicate repository name %s '
 
                                          'found in %s' % (name, path))
 
                else:
 

	
 
                    klass = get_backend(path[0])
 

	
 
                    if path[0] == 'hg' and path[0] in BACKENDS:
 
                        repos[name] = klass(safe_str(path[1]), baseui=baseui)
 

	
 
                    if path[0] == 'git' and path[0] in BACKENDS:
 
                        repos[name] = klass(path[1])
 
            except OSError:
 
                continue
 
        log.debug('found %s paths with repositories', len(repos))
 
        return repos
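
A hedged usage sketch (the path is hypothetical):

    repos = ScmModel().repo_scan('/srv/repositories')
    for name, scm_repo in sorted(repos.items()):
        print name, scm_repo.alias
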
 

	
 
    def get_repos(self, all_repos=None, sort_key=None, simple=False):
 
        """
 
        Get all repos from db and for each repo create its
 
        backend instance and fill that backend with information from the database
 

	
 
        :param all_repos: list of repository names as strings
 
            give specific repositories list, good for filtering
 

	
 
        :param sort_key: initial sorting of repos
 
        :param simple: use SimpleCachedRepoList - one without the SCM info
 
        """
 
        if all_repos is None:
 
            all_repos = self.sa.query(Repository)\
 
                        .filter(Repository.group_id == None)\
 
                        .order_by(func.lower(Repository.repo_name)).all()
 
        if simple:
 
            repo_iter = SimpleCachedRepoList(all_repos,
 
                                             repos_path=self.repos_path,
 
                                             order_by=sort_key)
 
        else:
 
            repo_iter = CachedRepoList(all_repos,
 
                                       repos_path=self.repos_path,
 
                                       order_by=sort_key)
 

	
 
        return repo_iter
 

	
 
    def get_repo_groups(self, all_groups=None):
 
        if all_groups is None:
 
            all_groups = RepoGroup.query()\
 
                .filter(RepoGroup.group_parent_id == None).all()
 
        return list(RepoGroupList(all_groups))
 

	
 
    def mark_for_invalidation(self, repo_name, delete=False):
 
        """
 
        Mark caches of this repo invalid in the database.
 

	
 
        :param repo_name: the repo for which caches should be marked invalid
 
        """
 
        CacheInvalidation.set_invalidate(repo_name, delete=delete)
 
        repo = Repository.get_by_repo_name(repo_name)
 
        if repo is not None:
 
            repo.update_changeset_cache()
 

	
 
    def toggle_following_repo(self, follow_repo_id, user_id):
 

	
 
        f = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        if f is not None:
 
            try:
 
                self.sa.delete(f)
 
                action_logger(UserTemp(user_id),
 
                              'stopped_following_repo',
 
                              RepoTemp(follow_repo_id))
 
                return
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                raise
 

	
 
        try:
 
            f = UserFollowing()
 
            f.user_id = user_id
 
            f.follows_repo_id = follow_repo_id
 
            self.sa.add(f)
 

	
 
            action_logger(UserTemp(user_id),
 
                          'started_following_repo',
 
                          RepoTemp(follow_repo_id))
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def toggle_following_user(self, follow_user_id, user_id):
 
        f = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.follows_user_id == follow_user_id)\
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        if f is not None:
 
            try:
 
                self.sa.delete(f)
 
                return
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                raise
 

	
 
        try:
 
            f = UserFollowing()
 
            f.user_id = user_id
 
            f.follows_user_id = follow_user_id
 
            self.sa.add(f)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def is_following_repo(self, repo_name, user_id, cache=False):
 
        r = self.sa.query(Repository)\
 
            .filter(Repository.repo_name == repo_name).scalar()
 

	
 
        f = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.follows_repository == r)\
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        return f is not None
 

	
 
    def is_following_user(self, username, user_id, cache=False):
 
        u = User.get_by_username(username)
 

	
 
        f = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.follows_user == u)\
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        return f is not None
 

	
 
    def get_followers(self, repo):
 
        repo = self._get_repo(repo)
 

	
 
        return self.sa.query(UserFollowing)\
 
                .filter(UserFollowing.follows_repository == repo).count()
 

	
 
    def get_forks(self, repo):
 
        repo = self._get_repo(repo)
 
        return self.sa.query(Repository)\
 
                .filter(Repository.fork == repo).count()
 

	
 
    def get_pull_requests(self, repo):
 
        repo = self._get_repo(repo)
 
        return self.sa.query(PullRequest)\
 
                .filter(PullRequest.other_repo == repo)\
 
                .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
 

	
 
    def mark_as_fork(self, repo, fork, user):
 
        repo = self.__get_repo(repo)
 
        fork = self.__get_repo(fork)
 
        if fork and repo.repo_id == fork.repo_id:
 
            raise Exception("Cannot set repository as fork of itself")
 

	
 
        if fork and repo.repo_type != fork.repo_type:
 
            raise RepositoryError("Cannot set repository as fork of repository with other type")
 

	
 
        repo.fork = fork
 
        self.sa.add(repo)
 
        return repo
 

	
 
    def _handle_rc_scm_extras(self, username, repo_name, repo_alias,
 
                              action=None):
 
        from kallithea import CONFIG
 
        from kallithea.lib.base import _get_ip_addr
 
        try:
 
            from pylons import request
 
            environ = request.environ
 
        except TypeError:
 
            # we might use this outside of request context, let's fake the
 
            # environ data
 
            from webob import Request
 
            environ = Request.blank('').environ
 
        extras = {
 
            'ip': _get_ip_addr(environ),
 
            'username': username,
 
            'action': action or 'push_local',
 
            'repository': repo_name,
 
            'scm': repo_alias,
 
            'config': CONFIG['__file__'],
 
            'server_url': get_server_url(environ),
 
            'make_lock': None,
 
            'locked_by': [None, None]
 
        }
 
        _set_extras(extras)
 

	
 
    def _handle_push(self, repo, username, action, repo_name, revisions):
 
        """
 
        Triggers push action hooks
 

	
 
        :param repo: SCM repo
 
        :param username: username who pushes
 
        :param action: push/push_local/push_remote
 
        :param repo_name: name of repo
 
        :param revisions: list of revisions that we pushed
 
        """
 
        self._handle_rc_scm_extras(username, repo_name, repo_alias=repo.alias)
 
        _scm_repo = repo._repo
 
        # trigger push hook
 
        if repo.alias == 'hg':
 
            log_push_action(_scm_repo.ui, _scm_repo, node=revisions[0])
 
        elif repo.alias == 'git':
 
            log_push_action(None, _scm_repo, _git_revs=revisions)
 

	
 
    def _get_IMC_module(self, scm_type):
 
        """
 
        Returns InMemoryCommit class based on scm_type
 

	
 
        :param scm_type:
 
        """
 
        if scm_type == 'hg':
 
            from kallithea.lib.vcs.backends.hg import MercurialInMemoryChangeset
 
            return MercurialInMemoryChangeset
 

	
 
        if scm_type == 'git':
 
            from kallithea.lib.vcs.backends.git import GitInMemoryChangeset
 
            return GitInMemoryChangeset
 

	
 
        raise Exception('Invalid scm_type, must be one of hg/git, got %s'
 
                        % (scm_type,))
 

	
 
    def pull_changes(self, repo, username):
 
        """
 
        Pull from "clone URL".
 
        """
 
        dbrepo = self.__get_repo(repo)
 
        clone_uri = dbrepo.clone_uri
 
        if not clone_uri:
 
            raise Exception("This repository doesn't have a clone uri")
 

	
 
        repo = dbrepo.scm_instance
 
        repo_name = dbrepo.repo_name
 
        try:
 
            if repo.alias == 'git':
 
                repo.fetch(clone_uri)
 
                # git doesn't really have something like a post-fetch action,
 
                # so we fake it now. TODO: extract fetched revisions somehow
 
                # here
 
                self._handle_push(repo,
 
                                  username=username,
 
                                  action='push_remote',
 
                                  repo_name=repo_name,
 
                                  revisions=[])
 
            else:
 
                self._handle_rc_scm_extras(username, dbrepo.repo_name,
 
                                           repo.alias, action='push_remote')
 
                repo.pull(clone_uri)
 

	
 
            self.mark_for_invalidation(repo_name)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def commit_change(self, repo, repo_name, cs, user, author, message,
 
                      content, f_path):
 
        """
 
        Commit a change to a single file
 

	
 
        :param repo: a db_repo.scm_instance
 
        """
 
        user = self._get_user(user)
 
        IMC = self._get_IMC_module(repo.alias)
 

	
 
        # decoding here ensures that we have properly encoded values;
 
        # otherwise this will throw exceptions and deny the commit
 
        content = safe_str(content)
 
        path = safe_str(f_path)
 
        # message and author need to be unicode;
 
        # the backend will then translate them into the required type
 
        message = safe_unicode(message)
 
        author = safe_unicode(author)
 
        imc = IMC(repo)
 
        imc.change(FileNode(path, content, mode=cs.get_file_mode(f_path)))
 
        try:
 
            tip = imc.commit(message=message, author=author,
 
                             parents=[cs], branch=cs.branch)
 
        except Exception as e:
 
            log.error(traceback.format_exc())
 
            raise IMCCommitError(str(e))
 
        finally:
 
            # always clear caches; if the commit fails we also want a fresh object
 
            self.mark_for_invalidation(repo_name)
 
        self._handle_push(repo,
 
                          username=user.username,
 
                          action='push_local',
 
                          repo_name=repo_name,
 
                          revisions=[tip.raw_id])
 
        return tip
 

	
 
    def _sanitize_path(self, f_path):
 
        if f_path.startswith('/') or f_path.startswith('.') or '../' in f_path:
 
            raise NonRelativePathError('%s is not a relative path' % f_path)
 
        if f_path:
 
            f_path = os.path.normpath(f_path)
 
        return f_path
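
Illustrative inputs and outcomes for this check:

    _sanitize_path('docs/index.rst')    # -> 'docs/index.rst'
    _sanitize_path('docs/./index.rst')  # -> 'docs/index.rst' (normalized)
    _sanitize_path('/etc/passwd')       # raises NonRelativePathError
    _sanitize_path('../etc/passwd')     # raises NonRelativePathError
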
 

	
 
    def get_nodes(self, repo_name, revision, root_path='/', flat=True):
 
        """
 
        Recursively walk the root dir and return all dir and file paths found.
 

	
 
        :param repo_name: name of repository
 
        :param revision: revision for which to list nodes
 
        :param root_path: root path to list
 
        :param flat: if True return plain path strings, otherwise dicts with name and type
 

	
 
        """
 
        _files = list()
 
        _dirs = list()
 
        try:
 
            _repo = self.__get_repo(repo_name)
 
            changeset = _repo.scm_instance.get_changeset(revision)
 
            root_path = root_path.lstrip('/')
 
            for topnode, dirs, files in changeset.walk(root_path):
 
                for f in files:
 
                    _files.append(f.path if flat else {"name": f.path,
 
                                                       "type": "file"})
 
                for d in dirs:
 
                    _dirs.append(d.path if flat else {"name": d.path,
 
                                                      "type": "dir"})
 
        except RepositoryError:
 
            log.debug(traceback.format_exc())
 
            raise
 

	
 
        return _dirs, _files
 

	
 
    def create_nodes(self, user, repo, message, nodes, parent_cs=None,
 
                     author=None, trigger_push_hook=True):
 
        """
 
        Commits specified nodes to repo.
 

	
 
        :param user: Kallithea User object or user_id, the committer
 
        :param repo: Kallithea Repository object
 
        :param message: commit message
 
        :param nodes: mapping {filename:{'content':content},...}
 
        :param parent_cs: parent changeset; can be empty, in which case this is the initial commit
 
        :param author: author of the commit; can differ from the committer (git only)
 
        :param trigger_push_hook: trigger push hooks
 

	
 
        :returns: new committed changeset
 
        """
 

	
 
        user = self._get_user(user)
 
        scm_instance = repo.scm_instance_no_cache()
 

	
 
        processed_nodes = []
 
        for f_path in nodes:
 
            f_path = self._sanitize_path(f_path)
 
            content = nodes[f_path]['content']
 
            f_path = safe_str(f_path)
 
            # decoding here ensures that we have properly encoded values;
 
            # otherwise this will throw exceptions and deny the commit
 
            if isinstance(content, (basestring,)):
 
                content = safe_str(content)
 
            elif isinstance(content, (file, cStringIO.OutputType,)):
 
                content = content.read()
 
            else:
 
                raise Exception('Content is of unrecognized type %s' % (
 
                    type(content)
 
                ))
 
            processed_nodes.append((f_path, content))
 

	
 
        message = safe_unicode(message)
 
        committer = user.full_contact
 
        author = safe_unicode(author) if author else committer
 

	
 
        IMC = self._get_IMC_module(scm_instance.alias)
 
        imc = IMC(scm_instance)
 

	
 
        if not parent_cs:
 
            parent_cs = EmptyChangeset(alias=scm_instance.alias)
 

	
 
        if isinstance(parent_cs, EmptyChangeset):
 
            # EmptyChangeset means we're editing an empty repository
 
            parents = None
 
        else:
 
            parents = [parent_cs]
 
        # add multiple nodes
 
        for path, content in processed_nodes:
 
            imc.add(FileNode(path, content=content))
 

	
 
        tip = imc.commit(message=message,
 
                         author=author,
 
                         parents=parents,
 
                         branch=parent_cs.branch)
 

	
 
        self.mark_for_invalidation(repo.repo_name)
 
        if trigger_push_hook:
 
            self._handle_push(scm_instance,
 
                              username=user.username,
 
                              action='push_local',
 
                              repo_name=repo.repo_name,
 
                              revisions=[tip.raw_id])
 
        return tip
 

	
 
    def update_nodes(self, user, repo, message, nodes, parent_cs=None,
 
                     author=None, trigger_push_hook=True):
 
        """
 
        Commits specified nodes to repo. Again.
 
        """
 
        user = self._get_user(user)
 
        scm_instance = repo.scm_instance_no_cache()
 

	
 
        message = safe_unicode(message)
 
        committer = user.full_contact
 
        author = safe_unicode(author) if author else committer
 

	
 
        imc_class = self._get_IMC_module(scm_instance.alias)
 
        imc = imc_class(scm_instance)
 

	
 
        if not parent_cs:
 
            parent_cs = EmptyChangeset(alias=scm_instance.alias)
 

	
 
        if isinstance(parent_cs, EmptyChangeset):
 
            # EmptyChangeset means we're editing an empty repository
 
            parents = None
 
        else:
 
            parents = [parent_cs]
 

	
 
        # apply the requested operation (add/del/mod) to each node
 
        for _filename, data in nodes.items():
 
            # new filename, can be renamed from the old one
 
            filename = self._sanitize_path(data['filename'])
 
            old_filename = self._sanitize_path(_filename)
 
            content = data['content']
 

	
 
            filenode = FileNode(old_filename, content=content)
 
            op = data['op']
 
            if op == 'add':
 
                imc.add(filenode)
 
            elif op == 'del':
 
                imc.remove(filenode)
 
            elif op == 'mod':
 
                if filename != old_filename:
 
                    #TODO: handle renames, needs vcs lib changes
 
                    imc.remove(filenode)
 
                    imc.add(FileNode(filename, content=content))
 
                else:
 
                    imc.change(filenode)
 

	
 
        # commit changes
 
        tip = imc.commit(message=message,
 
                         author=author,
 
                         parents=parents,
 
                         branch=parent_cs.branch)
 

	
 
        self.mark_for_invalidation(repo.repo_name)
 
        if trigger_push_hook:
 
            self._handle_push(scm_instance,
 
                              username=user.username,
 
                              action='push_local',
 
                              repo_name=repo.repo_name,
 
                              revisions=[tip.raw_id])
 

	
 
    def delete_nodes(self, user, repo, message, nodes, parent_cs=None,
 
                     author=None, trigger_push_hook=True):
 
        """
 
        Deletes specified nodes from repo.
 

	
 
        :param user: Kallithea User object or user_id, the committer
 
        :param repo: Kallithea Repository object
 
        :param message: commit message
 
        :param nodes: mapping {filename:{'content':content},...}
 
        :param parent_cs: parent changeset; can be empty, in which case this is the initial commit
 
        :param author: author of the commit; can differ from the committer (git only)
 
        :param trigger_push_hook: trigger push hooks
 

	
 
        :returns: new committed changeset after deletion
 
        """
 

	
 
        user = self._get_user(user)
 
        scm_instance = repo.scm_instance_no_cache()
 

	
 
        processed_nodes = []
 
        for f_path in nodes:
 
            f_path = self._sanitize_path(f_path)
 
            # content can be empty, but for compatibility we allow the same dict
 
            # structure as create_nodes
 
            content = nodes[f_path].get('content')
 
            processed_nodes.append((f_path, content))
 

	
 
        message = safe_unicode(message)
 
        committer = user.full_contact
 
        author = safe_unicode(author) if author else committer
 

	
 
        IMC = self._get_IMC_module(scm_instance.alias)
 
        imc = IMC(scm_instance)
 

	
 
        if not parent_cs:
 
            parent_cs = EmptyChangeset(alias=scm_instance.alias)
 

	
 
        if isinstance(parent_cs, EmptyChangeset):
 
            # EmptyChangeset means we're editing an empty repository
 
            parents = None
 
        else:
 
            parents = [parent_cs]
 
        # remove multiple nodes
 
        for path, content in processed_nodes:
 
            imc.remove(FileNode(path, content=content))
 

	
 
        tip = imc.commit(message=message,
 
                         author=author,
 
                         parents=parents,
 
                         branch=parent_cs.branch)
 

	
 
        self.mark_for_invalidation(repo.repo_name)
 
        if trigger_push_hook:
 
            self._handle_push(scm_instance,
 
                              username=user.username,
 
                              action='push_local',
 
                              repo_name=repo.repo_name,
 
                              revisions=[tip.raw_id])
 
        return tip
 

	
 
    def get_unread_journal(self):
 
        return self.sa.query(UserLog).count()
 

	
 
    def get_repo_landing_revs(self, repo=None):
 
        """
 
        Generates select options with tags, branches and bookmarks (bookmarks for hg only),
 
        grouped by type
 

	
 
        :param repo:
 
        """
 

	
 
        hist_l = []
 
        choices = []
 
        repo = self.__get_repo(repo)
 
        hist_l.append(['rev:tip', _('latest tip')])
 
        choices.append('rev:tip')
 
        if repo is None:
 
            return choices, hist_l
 

	
 
        repo = repo.scm_instance
 

	
 
        branches_group = ([(u'branch:%s' % k, k) for k, v in
 
                           repo.branches.iteritems()], _("Branches"))
 
        hist_l.append(branches_group)
 
        choices.extend([x[0] for x in branches_group[0]])
 

	
 
        if repo.alias == 'hg':
 
            bookmarks_group = ([(u'book:%s' % k, k) for k, v in
 
                                repo.bookmarks.iteritems()], _("Bookmarks"))
 
            hist_l.append(bookmarks_group)
 
            choices.extend([x[0] for x in bookmarks_group[0]])
 

	
 
        tags_group = ([(u'tag:%s' % k, k) for k, v in
 
                       repo.tags.iteritems()], _("Tags"))
 
        hist_l.append(tags_group)
 
        choices.extend([x[0] for x in tags_group[0]])
 

	
 
        return choices, hist_l
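
The shape of the return value, with hypothetical branch and tag names:

    choices, hist_l = ScmModel().get_repo_landing_revs(repo)
    # choices: ['rev:tip', u'branch:default', u'tag:v1.0', ...]
    # hist_l:  [['rev:tip', 'latest tip'],
    #           ([(u'branch:default', u'default'), ...], 'Branches'),
    #           ([(u'tag:v1.0', u'v1.0'), ...], 'Tags')]
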
 

	
 
    def install_git_hooks(self, repo, force_create=False):
 
        """
 
        Creates a kallithea hook inside a git repository
 

	
 
        :param repo: Instance of VCS repo
 
        :param force_create: Create even if same name hook exists
 
        """
 

	
 
        loc = jn(repo.path, 'hooks')
 
        if not repo.bare:
 
            loc = jn(repo.path, '.git', 'hooks')
 
        if not os.path.isdir(loc):
 
            os.makedirs(loc)
 

	
 
        tmpl_post = pkg_resources.resource_string(
 
            'kallithea', jn('config', 'post_receive_tmpl.py')
 
        )
 
        tmpl_pre = pkg_resources.resource_string(
 
            'kallithea', jn('config', 'pre_receive_tmpl.py')
 
        )
 

	
 
        for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
 
            _hook_file = jn(loc, '%s-receive' % h_type)
 
            has_hook = False
 
            log.debug('Installing git hook in repo %s', repo)
 
            if os.path.exists(_hook_file):
 
                # let's take a look at this hook, maybe it's from kallithea?
 
                log.debug('hook exists, checking if it is from kallithea')
 
                with open(_hook_file, 'rb') as f:
 
                    data = f.read()
 
                    matches = re.compile(r'KALLITHEA_HOOK_VER\s*=\s*(.*)').search(data)
 
                    if matches:
 
                        try:
 
                            ver = matches.groups()[0]
 
                            log.debug('got %s, it is a kallithea hook', ver)
 
                            has_hook = True
 
                        except Exception:
 
                            log.error(traceback.format_exc())
 
            else:
 
                # there is no hook in this dir, so we want to create one
 
                has_hook = True
 

	
 
            if has_hook or force_create:
 
                log.debug('writing %s hook file !', h_type)
 
                try:
 
                    with open(_hook_file, 'wb') as f:
 
                        tmpl = tmpl.replace('_TMPL_', kallithea.__version__)
 
                        f.write(tmpl)
 
                    os.chmod(_hook_file, 0755)
 
                except IOError as e:
 
                    log.error('error writing %s: %s', _hook_file, e)
 
            else:
 
                log.debug('skipping writing hook file')
 

	
 
def AvailableRepoGroupChoices(top_perms, repo_group_perms, extras=()):
 
    """Return group_id,string tuples with choices for all the repo groups where
 
    the user has the necessary permissions.
 

	
 
    Top level is -1.
 
    """
 
    groups = RepoGroup.query().all()
 
    if HasPermissionAll('hg.admin')('available repo groups'):
 
        groups.append(None)
 
    else:
 
        groups = list(RepoGroupList(groups, perm_set=repo_group_perms))
 
        if top_perms and HasPermissionAny(*top_perms)('available repo groups'):
 
            groups.append(None)
 
        for extra in extras:
 
            if not any(rg == extra for rg in groups):
 
                groups.append(extra)
 
    return RepoGroup.groups_choices(groups=groups)
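
A hedged usage sketch (permission names as used elsewhere in Kallithea):

    choices = AvailableRepoGroupChoices(
        top_perms=['hg.create.repository'],
        repo_group_perms=['group.write', 'group.admin'])
    # -> a list of (group_id, string) choice tuples; the top level has id -1
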
kallithea/tests/api/api_base.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
"""
 
tests for api. run with::
 

	
 
    KALLITHEA_WHOOSH_TEST_DISABLE=1 nosetests --with-coverage --cover-package=kallithea.controllers.api.api -x kallithea/tests/api
 
"""
 

	
 
from __future__ import with_statement
 
import os
 
import random
 
import mock
 

	
 
from kallithea.tests import *
 
from kallithea.tests.fixture import Fixture
 
from kallithea.lib.compat import json
 
from kallithea.lib.auth import AuthUser
 
from kallithea.model.user import UserModel
 
from kallithea.model.user_group import UserGroupModel
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.repo_group import RepoGroupModel
 
from kallithea.model.meta import Session
 
from kallithea.model.scm import ScmModel
 
from kallithea.model.gist import GistModel
 
from kallithea.model.db import Repository, User, Setting
 
from kallithea.lib.utils2 import time_to_datetime
 

	
 

	
 
API_URL = '/_admin/api'
 
TEST_USER_GROUP = 'test_user_group'
 
TEST_REPO_GROUP = 'test_repo_group'
 

	
 
fixture = Fixture()
 

	
 

	
 
def _build_data(apikey, method, **kw):
 
    """
 
    Builds an API request payload with a random request ID
 

	
 
    :param apikey: API key to authenticate with
 
    :param method: name of the API method to call
 
    """
 
    random_id = random.randrange(1, 9999)
 
    return random_id, json.dumps({
 
        "id": random_id,
 
        "api_key": apikey,
 
        "method": method,
 
        "args": kw
 
    })
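
For example, the payload produced for a get_repo call (key and id are
illustrative):

    id_, params = _build_data('somekey', 'get_repo', repoid='my-repo')
    # params is a JSON string like:
    # '{"id": 1234, "api_key": "somekey", "method": "get_repo",
    #   "args": {"repoid": "my-repo"}}'
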
 

	
 

	
 
jsonify = lambda obj: json.loads(json.dumps(obj))
 

	
 

	
 
def crash(*args, **kwargs):
 
    raise Exception('Total Crash !')
 

	
 

	
 
def api_call(test_obj, params):
 
    response = test_obj.app.post(API_URL, content_type='application/json',
 
                                 params=params)
 
    return response
 

	
 

	
 
## helpers
 
def make_user_group(name=TEST_USER_GROUP):
 
    gr = fixture.create_user_group(name, cur_user=TEST_USER_ADMIN_LOGIN)
 
    UserGroupModel().add_user_to_group(user_group=gr,
 
                                       user=TEST_USER_ADMIN_LOGIN)
 
    Session().commit()
 
    return gr
 

	
 

	
 
def make_repo_group(name=TEST_REPO_GROUP):
 
    gr = fixture.create_repo_group(name, cur_user=TEST_USER_ADMIN_LOGIN)
 
    Session().commit()
 
    return gr
 

	
 

	
 
class _BaseTestApi(object):
 
    REPO = None
 
    REPO_TYPE = None
 

	
 
    @classmethod
 
    def setup_class(cls):
 
        cls.usr = User.get_by_username(TEST_USER_ADMIN_LOGIN)
 
        cls.apikey = cls.usr.api_key
 
        cls.test_user = UserModel().create_or_update(
 
            username='test-api',
 
            password='test',
 
            email='test@example.com',
 
            firstname='first',
 
            lastname='last'
 
        )
 
        Session().commit()
 
        cls.TEST_USER_LOGIN = cls.test_user.username
 
        cls.apikey_regular = cls.test_user.api_key
 

	
 
    @classmethod
 
    def teardown_class(cls):
 
        pass
 

	
 
    def setUp(self):
 
        self.maxDiff = None
 
        make_user_group()
 
        make_repo_group()
 

	
 
    def tearDown(self):
 
        fixture.destroy_user_group(TEST_USER_GROUP)
 
        fixture.destroy_gists()
 
        fixture.destroy_repo_group(TEST_REPO_GROUP)
 

	
 
    def _compare_ok(self, id_, expected, given):
 
        expected = jsonify({
 
            'id': id_,
 
            'error': None,
 
            'result': expected
 
        })
 
        given = json.loads(given)
 
        self.assertEqual(expected, given)
 

	
 
    def _compare_error(self, id_, expected, given):
 
        expected = jsonify({
 
            'id': id_,
 
            'error': expected,
 
            'result': None
 
        })
 
        given = json.loads(given)
 
        self.assertEqual(expected, given)
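    # Both comparison helpers assume the JSON-RPC style response envelope:
    # success answers {"id": ..., "result": ..., "error": null} and failure
    # answers {"id": ..., "result": null, "error": "<message>"}.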
 

	
 
    def test_Optional_object(self):
 
        from kallithea.controllers.api.api import Optional
 

	
 
        option1 = Optional(None)
 
        self.assertEqual('<Optional:%s>' % None, repr(option1))
 
        self.assertEqual(option1(), None)
 

	
 
        self.assertEqual(1, Optional.extract(Optional(1)))
 
        self.assertEqual('trololo', Optional.extract('trololo'))
 

	
 
    def test_Optional_OAttr(self):
 
        from kallithea.controllers.api.api import Optional, OAttr
 

	
 
        option1 = Optional(OAttr('apiuser'))
 
        self.assertEqual('apiuser', Optional.extract(option1))
 

	
 
    def test_OAttr_object(self):
 
        from kallithea.controllers.api.api import OAttr
 

	
 
        oattr1 = OAttr('apiuser')
 
        self.assertEqual('<OptionalAttr:apiuser>', repr(oattr1))
 
        self.assertEqual(oattr1(), oattr1)
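    # A minimal sketch of the Optional semantics exercised by the three tests
    # above (an illustration only, not Kallithea's actual implementation):
    #
    #     class Optional(object):
    #         def __init__(self, type_): self.type_ = type_
    #         def __repr__(self): return '<Optional:%s>' % self.type_
    #         def __call__(self): return self.type_
    #         @classmethod
    #         def extract(cls, val):
    #             return val.type_ if isinstance(val, cls) else val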
 

	
 
    def test_api_wrong_key(self):
 
        id_, params = _build_data('trololo', 'get_user')
 
        response = api_call(self, params)
 

	
 
        expected = 'Invalid API key'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_missing_non_optional_param(self):
 
        id_, params = _build_data(self.apikey, 'get_repo')
 
        response = api_call(self, params)
 

	
 
        expected = 'Missing non optional `repoid` arg in JSON DATA'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_missing_non_optional_param_args_null(self):
 
        id_, params = _build_data(self.apikey, 'get_repo')
 
        params = params.replace('"args": {}', '"args": null')
 
        response = api_call(self, params)
 

	
 
        expected = 'Missing non optional `repoid` arg in JSON DATA'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_missing_non_optional_param_args_bad(self):
 
        id_, params = _build_data(self.apikey, 'get_repo')
 
        params = params.replace('"args": {}', '"args": 1')
 
        response = api_call(self, params)
 

	
 
        expected = 'Missing non optional `repoid` arg in JSON DATA'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_args_is_null(self):
 
        id_, params = _build_data(self.apikey, 'get_users', )
 
        params = params.replace('"args": {}', '"args": null')
 
        response = api_call(self, params)
 
        self.assertEqual(response.status, '200 OK')
 

	
 
    def test_api_args_is_bad(self):
 
        id_, params = _build_data(self.apikey, 'get_users', )
 
        params = params.replace('"args": {}', '"args": 1')
 
        response = api_call(self, params)
 
        self.assertEqual(response.status, '200 OK')
 

	
 
    def test_api_args_different_args(self):
 
        import string
 
        expected = {
 
            'ascii_letters': string.ascii_letters,
 
            'ws': string.whitespace,
 
            'printables': string.printable
 
        }
 
        id_, params = _build_data(self.apikey, 'test', args=expected)
 
        response = api_call(self, params)
 
        self.assertEqual(response.status, '200 OK')
 
        self._compare_ok(id_, expected, response.body)
 

	
 
    def test_api_get_users(self):
 
        id_, params = _build_data(self.apikey, 'get_users', )
 
        response = api_call(self, params)
 
        ret_all = []
 
        _users = User.query().filter(User.username != User.DEFAULT_USER) \
 
            .order_by(User.username).all()
 
        for usr in _users:
 
            ret = usr.get_api_data()
 
            ret_all.append(jsonify(ret))
 
        expected = ret_all
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_user(self):
 
        id_, params = _build_data(self.apikey, 'get_user',
 
                                  userid=TEST_USER_ADMIN_LOGIN)
 
        response = api_call(self, params)
 

	
 
        usr = User.get_by_username(TEST_USER_ADMIN_LOGIN)
 
        ret = usr.get_api_data()
 
        ret['permissions'] = AuthUser(dbuser=usr).permissions
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_that_does_not_exist(self):
 
        id_, params = _build_data(self.apikey, 'get_user',
 
                                  userid='trololo')
 
        response = api_call(self, params)
 

	
 
        expected = "user `%s` does not exist" % 'trololo'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_without_giving_userid(self):
 
        id_, params = _build_data(self.apikey, 'get_user')
 
        response = api_call(self, params)
 

	
 
        usr = User.get_by_username(TEST_USER_ADMIN_LOGIN)
 
        ret = usr.get_api_data()
 
        ret['permissions'] = AuthUser(dbuser=usr).permissions
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_without_giving_userid_non_admin(self):
 
        id_, params = _build_data(self.apikey_regular, 'get_user')
 
        response = api_call(self, params)
 

	
 
        usr = User.get_by_username(self.TEST_USER_LOGIN)
 
        ret = usr.get_api_data()
 
        ret['permissions'] = AuthUser(dbuser=usr).permissions
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_with_giving_userid_non_admin(self):
 
        id_, params = _build_data(self.apikey_regular, 'get_user',
 
                                  userid=self.TEST_USER_LOGIN)
 
        response = api_call(self, params)
 

	
 
        expected = 'userid is not the same as your user'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_pull(self):
 
        repo_name = 'test_pull'
 
        r = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        r.clone_uri = os.path.join(TESTS_TMP_PATH, self.REPO)
 
        Session.add(r)
 
        Session.commit()
 

	
 
        id_, params = _build_data(self.apikey, 'pull',
 
                                  repoid=repo_name,)
 
        response = api_call(self, params)
 

	
 
        expected = {'msg': 'Pulled from `%s`' % repo_name,
 
                    'repository': repo_name}
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
        fixture.destroy_repo(repo_name)
 

	
 
    def test_api_pull_error(self):
 
        id_, params = _build_data(self.apikey, 'pull',
 
                                  repoid=self.REPO, )
 
        response = api_call(self, params)
 

	
 
        expected = 'Unable to pull changes from `%s`' % self.REPO
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_rescan_repos(self):
 
        id_, params = _build_data(self.apikey, 'rescan_repos')
 
        response = api_call(self, params)
 

	
 
        expected = {'added': [], 'removed': []}
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(ScmModel, 'repo_scan', crash)
 
    def test_api_rescan_error(self):
 
        id_, params = _build_data(self.apikey, 'rescan_repos', )
 
        response = api_call(self, params)
 

	
 
        expected = 'Error occurred during rescan repositories action'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_invalidate_cache(self):
 
        repo = RepoModel().get_by_repo_name(self.REPO)
 
        repo.scm_instance_cached()  # seed cache
 

	
 
        id_, params = _build_data(self.apikey, 'invalidate_cache',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 

	
 
        expected = {
 
            'msg': "Cache for repository `%s` was invalidated" % (self.REPO,),
 
            'repository': self.REPO
 
        }
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(ScmModel, 'mark_for_invalidation', crash)
 
    def test_api_invalidate_cache_error(self):
 
        id_, params = _build_data(self.apikey, 'invalidate_cache',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 

	
 
        expected = 'Error occurred during cache invalidation action'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_invalidate_cache_regular_user_no_permission(self):
 
        repo = RepoModel().get_by_repo_name(self.REPO)
 
        repo.scm_instance_cached()  # seed cache
 

	
 
        id_, params = _build_data(self.apikey_regular, 'invalidate_cache',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 

	
 
        expected = "repository `%s` does not exist" % (self.REPO,)
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_lock_repo_lock_acquire(self):
 
        id_, params = _build_data(self.apikey, 'lock',
 
                                  userid=TEST_USER_ADMIN_LOGIN,
 
                                  repoid=self.REPO,
 
                                  locked=True)
 
        response = api_call(self, params)
 
        expected = {
 
            'repo': self.REPO, 'locked': True,
 
            'locked_since': response.json['result']['locked_since'],  # server-side timestamp, read back from the response
 
            'locked_by': TEST_USER_ADMIN_LOGIN,
 
            'lock_state_changed': True,
 
            'msg': ('User `%s` set lock state for repo `%s` to `%s`'
 
                    % (TEST_USER_ADMIN_LOGIN, self.REPO, True))
 
        }
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_lock_repo_lock_acquire_by_non_admin(self):
 
        repo_name = 'api_delete_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE,
 
                            cur_user=self.TEST_USER_LOGIN)
 
        try:
 
            id_, params = _build_data(self.apikey_regular, 'lock',
 
                                      repoid=repo_name,
 
                                      locked=True)
 
            response = api_call(self, params)
 
            expected = {
 
                'repo': repo_name,
 
                'locked': True,
 
                'locked_since': response.json['result']['locked_since'],
 
                'locked_by': self.TEST_USER_LOGIN,
 
                'lock_state_changed': True,
 
                'msg': ('User `%s` set lock state for repo `%s` to `%s`'
 
                        % (self.TEST_USER_LOGIN, repo_name, True))
 
            }
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_lock_repo_lock_acquire_non_admin_with_userid(self):
 
        repo_name = 'api_delete_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE,
 
                            cur_user=self.TEST_USER_LOGIN)
 
        try:
 
            id_, params = _build_data(self.apikey_regular, 'lock',
 
                                      userid=TEST_USER_ADMIN_LOGIN,
 
                                      repoid=repo_name,
 
                                      locked=True)
 
            response = api_call(self, params)
 
            expected = 'userid is not the same as your user'
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_lock_repo_lock_acquire_non_admin_not_his_repo(self):
 
        id_, params = _build_data(self.apikey_regular, 'lock',
 
                                  repoid=self.REPO,
 
                                  locked=True)
 
        response = api_call(self, params)
 
        expected = 'repository `%s` does not exist' % (self.REPO)
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_lock_repo_lock_release(self):
 
        id_, params = _build_data(self.apikey, 'lock',
 
                                  userid=TEST_USER_ADMIN_LOGIN,
 
                                  repoid=self.REPO,
 
                                  locked=False)
 
        response = api_call(self, params)
 
        expected = {
 
            'repo': self.REPO,
 
            'locked': False,
 
            'locked_since': None,
 
            'locked_by': TEST_USER_ADMIN_LOGIN,
 
            'lock_state_changed': True,
 
            'msg': ('User `%s` set lock state for repo `%s` to `%s`'
 
                    % (TEST_USER_ADMIN_LOGIN, self.REPO, False))
 
        }
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_lock_repo_lock_acquire_optional_userid(self):
 
        id_, params = _build_data(self.apikey, 'lock',
 
                                  repoid=self.REPO,
 
                                  locked=True)
 
        response = api_call(self, params)
 
        time_ = response.json['result']['locked_since']
 
        expected = {
 
            'repo': self.REPO,
 
            'locked': True,
 
            'locked_since': time_,
 
            'locked_by': TEST_USER_ADMIN_LOGIN,
 
            'lock_state_changed': True,
 
            'msg': ('User `%s` set lock state for repo `%s` to `%s`'
 
                    % (TEST_USER_ADMIN_LOGIN, self.REPO, True))
 
        }
 

	
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_lock_repo_lock_optional_locked(self):
 
        id_, params = _build_data(self.apikey, 'lock',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 
        time_ = response.json['result']['locked_since']
 
        expected = {
 
            'repo': self.REPO,
 
            'locked': True,
 
            'locked_since': time_,
 
            'locked_by': TEST_USER_ADMIN_LOGIN,
 
            'lock_state_changed': False,
 
            'msg': ('Repo `%s` locked by `%s` on `%s`.'
 
                    % (self.REPO, TEST_USER_ADMIN_LOGIN,
 
                       json.dumps(time_to_datetime(time_))))
 
        }
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_lock_repo_lock_optional_not_locked(self):
 
        repo_name = 'api_not_locked'
 
        repo = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE,
 
                            cur_user=self.TEST_USER_LOGIN)
 
        self.assertEqual(repo.locked, [None, None])
 
        try:
 
            id_, params = _build_data(self.apikey, 'lock',
 
                                      repoid=repo.repo_id)
 
            response = api_call(self, params)
 
            expected = {
 
                'repo': repo_name,
 
                'locked': False,
 
                'locked_since': None,
 
                'locked_by': None,
 
                'lock_state_changed': False,
 
                'msg': ('Repo `%s` not locked.' % (repo_name,))
 
            }
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    @mock.patch.object(Repository, 'lock', crash)
 
    def test_api_lock_error(self):
 
        id_, params = _build_data(self.apikey, 'lock',
 
                                  userid=TEST_USER_ADMIN_LOGIN,
 
                                  repoid=self.REPO,
 
                                  locked=True)
 
        response = api_call(self, params)
 

	
 
        expected = 'Error occurred locking repository `%s`' % self.REPO
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_locks_regular_user(self):
 
        id_, params = _build_data(self.apikey_regular, 'get_locks')
 
        response = api_call(self, params)
 
        expected = []
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_locks_with_userid_regular_user(self):
 
        id_, params = _build_data(self.apikey_regular, 'get_locks',
 
                                  userid=TEST_USER_ADMIN_LOGIN)
 
        response = api_call(self, params)
 
        expected = 'userid is not the same as your user'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_locks(self):
 
        id_, params = _build_data(self.apikey, 'get_locks')
 
        response = api_call(self, params)
 
        expected = []
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_locks_with_one_locked_repo(self):
 
        repo_name = 'api_delete_me'
 
        repo = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE,
 
                                   cur_user=self.TEST_USER_LOGIN)
 
        Repository.lock(repo, User.get_by_username(self.TEST_USER_LOGIN).user_id)
 
        try:
 
            id_, params = _build_data(self.apikey, 'get_locks')
 
            response = api_call(self, params)
 
            expected = [repo.get_api_data()]
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_get_locks_with_one_locked_repo_for_specific_user(self):
 
        repo_name = 'api_delete_me'
 
        repo = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE,
 
                                   cur_user=self.TEST_USER_LOGIN)
 
        Repository.lock(repo, User.get_by_username(self.TEST_USER_LOGIN).user_id)
 
        try:
 
            id_, params = _build_data(self.apikey, 'get_locks',
 
                                      userid=self.TEST_USER_LOGIN)
 
            response = api_call(self, params)
 
            expected = [repo.get_api_data()]
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_get_locks_with_userid(self):
 
        id_, params = _build_data(self.apikey, 'get_locks',
 
                                  userid=TEST_USER_REGULAR_LOGIN)
 
        response = api_call(self, params)
 
        expected = []
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_create_existing_user(self):
 
        id_, params = _build_data(self.apikey, 'create_user',
 
                                  username=TEST_USER_ADMIN_LOGIN,
 
                                  email='test@foo.com',
 
                                  password='trololo')
 
        response = api_call(self, params)
 

	
 
        expected = "user `%s` already exist" % TEST_USER_ADMIN_LOGIN
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_create_user_with_existing_email(self):
 
        id_, params = _build_data(self.apikey, 'create_user',
 
                                  username=TEST_USER_ADMIN_LOGIN + 'new',
 
                                  email=TEST_USER_REGULAR_EMAIL,
 
                                  password='trololo')
 
        response = api_call(self, params)
 

	
 
        expected = "email `%s` already exist" % TEST_USER_REGULAR_EMAIL
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_create_user(self):
 
        username = 'test_new_api_user'
 
        email = username + "@foo.com"
 

	
 
        id_, params = _build_data(self.apikey, 'create_user',
 
                                  username=username,
 
                                  email=email,
 
                                  password='trololo')
 
        response = api_call(self, params)
 

	
 
        usr = User.get_by_username(username)
 
        ret = dict(
 
            msg='created new user `%s`' % username,
 
            user=jsonify(usr.get_api_data())
 
        )
 

	
 
        try:
 
            expected = ret
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user(usr.user_id)
 

	
 
    def test_api_create_user_without_password(self):
 
        username = 'test_new_api_user_passwordless'
 
        email = username + "@foo.com"
 

	
 
        id_, params = _build_data(self.apikey, 'create_user',
 
                                  username=username,
 
                                  email=email)
 
        response = api_call(self, params)
 

	
 
        usr = User.get_by_username(username)
 
        ret = dict(
 
            msg='created new user `%s`' % username,
 
            user=jsonify(usr.get_api_data())
 
        )
 
        try:
 
            expected = ret
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user(usr.user_id)
 

	
 
    def test_api_create_user_with_extern_name(self):
 
        username = 'test_new_api_user_passwordless'
 
        email = username + "@foo.com"
 

	
 
        id_, params = _build_data(self.apikey, 'create_user',
 
                                  username=username,
 
                                  email=email, extern_name='internal')
 
        response = api_call(self, params)
 

	
 
        usr = User.get_by_username(username)
 
        ret = dict(
 
            msg='created new user `%s`' % username,
 
            user=jsonify(usr.get_api_data())
 
        )
 
        try:
 
            expected = ret
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user(usr.user_id)
 

	
 
    @mock.patch.object(UserModel, 'create_or_update', crash)
 
    def test_api_create_user_when_exception_happened(self):
 

	
 
        username = 'test_new_api_user'
 
        email = username + "@foo.com"
 

	
 
        id_, params = _build_data(self.apikey, 'create_user',
 
                                  username=username,
 
                                  email=email,
 
                                  password='trololo')
 
        response = api_call(self, params)
 
        expected = 'failed to create user `%s`' % username
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_delete_user(self):
 
        usr = UserModel().create_or_update(username=u'test_user',
 
                                           password=u'qweqwe',
 
                                           email=u'u232@example.com',
 
                                           firstname=u'u1', lastname=u'u1')
 
        Session().commit()
 
        username = usr.username
 
        email = usr.email
 
        usr_id = usr.user_id
 
        ## DELETE THIS USER NOW
 

	
 
        id_, params = _build_data(self.apikey, 'delete_user',
 
                                  userid=username, )
 
        response = api_call(self, params)
 

	
 
        ret = {'msg': 'deleted user ID:%s %s' % (usr_id, username),
 
               'user': None}
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(UserModel, 'delete', crash)
 
    def test_api_delete_user_when_exception_happened(self):
 
        usr = UserModel().create_or_update(username=u'test_user',
 
                                           password=u'qweqwe',
 
                                           email=u'u232@example.com',
 
                                           firstname=u'u1', lastname=u'u1')
 
        Session().commit()
 
        username = usr.username
 

	
 
        id_, params = _build_data(self.apikey, 'delete_user',
 
                                  userid=username, )
 
        response = api_call(self, params)
 
        ret = 'failed to delete user ID:%s %s' % (usr.user_id,
 
                                                  usr.username)
 
        expected = ret
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @parameterized.expand([('firstname', 'new_username'),
 
                           ('lastname', 'new_username'),
 
                           ('email', 'new_username'),
 
                           ('admin', True),
 
                           ('admin', False),
 
                           ('extern_type', 'ldap'),
 
                           ('extern_type', None),
 
                           ('extern_name', 'test'),
 
                           ('extern_name', None),
 
                           ('active', False),
 
                           ('active', True),
 
                           ('password', 'newpass')
 
    ])
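    # Each (attribute, value) pair above drives a separate 'update_user'
    # call; the full serialized user is compared afterwards, so unrelated
    # field drift would also be caught.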
 
    def test_api_update_user(self, name, expected):
 
        usr = User.get_by_username(self.TEST_USER_LOGIN)
 
        kw = {name: expected,
 
              'userid': usr.user_id}
 
        id_, params = _build_data(self.apikey, 'update_user', **kw)
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'updated user ID:%s %s' % (
 
                usr.user_id, self.TEST_USER_LOGIN),
 
            'user': jsonify(User \
 
                .get_by_username(self.TEST_USER_LOGIN) \
 
                .get_api_data())
 
        }
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_update_user_no_changed_params(self):
 
        usr = User.get_by_username(TEST_USER_ADMIN_LOGIN)
 
        ret = jsonify(usr.get_api_data())
 
        id_, params = _build_data(self.apikey, 'update_user',
 
                                  userid=TEST_USER_ADMIN_LOGIN)
 

	
 
        response = api_call(self, params)
 
        ret = {
 
            'msg': 'updated user ID:%s %s' % (
 
                usr.user_id, TEST_USER_ADMIN_LOGIN),
 
            'user': ret
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_update_user_by_user_id(self):
 
        usr = User.get_by_username(TEST_USER_ADMIN_LOGIN)
 
        ret = jsonify(usr.get_api_data())
 
        id_, params = _build_data(self.apikey, 'update_user',
 
                                  userid=usr.user_id)
 

	
 
        response = api_call(self, params)
 
        ret = {
 
            'msg': 'updated user ID:%s %s' % (
 
                usr.user_id, TEST_USER_ADMIN_LOGIN),
 
            'user': ret
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_update_user_default_user(self):
 
        usr = User.get_default_user()
 
        id_, params = _build_data(self.apikey, 'update_user',
 
                                  userid=usr.user_id)
 

	
 
        response = api_call(self, params)
 
        expected = 'editing default user is forbidden'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(UserModel, 'update_user', crash)
 
    def test_api_update_user_when_exception_happens(self):
 
        usr = User.get_by_username(TEST_USER_ADMIN_LOGIN)
 
        ret = jsonify(usr.get_api_data())
 
        id_, params = _build_data(self.apikey, 'update_user',
 
                                  userid=usr.user_id)
 

	
 
        response = api_call(self, params)
 
        ret = 'failed to update user `%s`' % usr.user_id
 

	
 
        expected = ret
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_repo(self):
 
        new_group = 'some_new_group'
 
        make_user_group(new_group)
 
        RepoModel().grant_user_group_permission(repo=self.REPO,
 
                                                group_name=new_group,
 
                                                perm='repository.read')
 
        Session().commit()
 
        id_, params = _build_data(self.apikey, 'get_repo',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 

	
 
        repo = RepoModel().get_by_repo_name(self.REPO)
 
        ret = repo.get_api_data()
 

	
 
        members = []
 
        followers = []
 
        for user in repo.repo_to_perm:
 
            perm = user.permission.permission_name
 
            user = user.user
 
            user_data = {'name': user.username, 'type': "user",
 
                         'permission': perm}
 
            members.append(user_data)
 

	
 
        for user_group in repo.users_group_to_perm:
 
            perm = user_group.permission.permission_name
 
            user_group = user_group.users_group
 
            user_group_data = {'name': user_group.users_group_name,
 
                               'type': "user_group", 'permission': perm}
 
            members.append(user_group_data)
 

	
 
        for user in repo.followers:
 
            followers.append(user.user.get_api_data())
 

	
 
        ret['members'] = members
 
        ret['followers'] = followers
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_user_group(new_group)
 

	
 
    @parameterized.expand([
 
        ('repository.admin',),
 
        ('repository.write',),
 
        ('repository.read',),
 
    ])
 
    def test_api_get_repo_by_non_admin(self, grant_perm):
 
        RepoModel().grant_user_permission(repo=self.REPO,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm=grant_perm)
 
        Session().commit()
 
        id_, params = _build_data(self.apikey_regular, 'get_repo',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 

	
 
        repo = RepoModel().get_by_repo_name(self.REPO)
 
        ret = repo.get_api_data()
 

	
 
        members = []
 
        followers = []
 
        self.assertEqual(2, len(repo.repo_to_perm))
 
        for user in repo.repo_to_perm:
 
            perm = user.permission.permission_name
 
            user_obj = user.user
 
            user_data = {'name': user_obj.username, 'type': "user",
 
                         'permission': perm}
 
            members.append(user_data)
 

	
 
        for user_group in repo.users_group_to_perm:
 
            perm = user_group.permission.permission_name
 
            user_group_obj = user_group.users_group
 
            user_group_data = {'name': user_group_obj.users_group_name,
 
                               'type': "user_group", 'permission': perm}
 
            members.append(user_group_data)
 

	
 
        for user in repo.followers:
 
            followers.append(user.user.get_api_data())
 

	
 
        ret['members'] = members
 
        ret['followers'] = followers
 

	
 
        expected = ret
 
        try:
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            RepoModel().revoke_user_permission(self.REPO, self.TEST_USER_LOGIN)
 

	
 
    def test_api_get_repo_by_non_admin_no_permission_to_repo(self):
 
        RepoModel().grant_user_permission(repo=self.REPO,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm='repository.none')
 

	
 
        id_, params = _build_data(self.apikey_regular, 'get_repo',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 

	
 
        expected = 'repository `%s` does not exist' % (self.REPO)
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_repo_that_does_not_exist(self):
 
        id_, params = _build_data(self.apikey, 'get_repo',
 
                                  repoid='no-such-repo')
 
        response = api_call(self, params)
 

	
 
        ret = 'repository `%s` does not exist' % 'no-such-repo'
 
        expected = ret
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_repos(self):
 
        id_, params = _build_data(self.apikey, 'get_repos')
 
        response = api_call(self, params)
 

	
 
        result = []
 
        for repo in RepoModel().get_all():
 
            result.append(repo.get_api_data())
 
        ret = jsonify(result)
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_repos_non_admin(self):
 
        id_, params = _build_data(self.apikey_regular, 'get_repos')
 
        response = api_call(self, params)
 

	
 
        result = []
 
        for repo in RepoModel().get_all_user_repos(self.TEST_USER_LOGIN):
 
            result.append(repo.get_api_data())
 
        ret = jsonify(result)
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    @parameterized.expand([('all', 'all'),
 
                           ('dirs', 'dirs'),
 
                           ('files', 'files'), ])
 
    def test_api_get_repo_nodes(self, name, ret_type):
 
        rev = 'tip'
 
        path = '/'
 
        id_, params = _build_data(self.apikey, 'get_repo_nodes',
 
                                  repoid=self.REPO, revision=rev,
 
                                  root_path=path,
 
                                  ret_type=ret_type)
 
        response = api_call(self, params)
 

	
 
        # we don't check the actual return types here, since they are tested
        # elsewhere
 
        expected = response.json['result']
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_repo_nodes_bad_revisions(self):
 
        rev = 'i-dont-exist'
 
        path = '/'
 
        id_, params = _build_data(self.apikey, 'get_repo_nodes',
 
                                  repoid=self.REPO, revision=rev,
 
                                  root_path=path, )
 
        response = api_call(self, params)
 

	
 
        expected = 'failed to get repo: `%s` nodes' % self.REPO
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_repo_nodes_bad_path(self):
 
        rev = 'tip'
 
        path = '/idontexits'
 
        id_, params = _build_data(self.apikey, 'get_repo_nodes',
 
                                  repoid=self.REPO, revision=rev,
 
                                  root_path=path, )
 
        response = api_call(self, params)
 

	
 
        expected = 'failed to get repo: `%s` nodes' % self.REPO
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_repo_nodes_bad_ret_type(self):
 
        rev = 'tip'
 
        path = '/'
 
        ret_type = 'error'
 
        id_, params = _build_data(self.apikey, 'get_repo_nodes',
 
                                  repoid=self.REPO, revision=rev,
 
                                  root_path=path,
 
                                  ret_type=ret_type)
 
        response = api_call(self, params)
 

	
 
        expected = ('ret_type must be one of %s'
 
                    % (','.join(['files', 'dirs', 'all'])))
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @parameterized.expand([('all', 'all', 'repository.write'),
 
                           ('dirs', 'dirs', 'repository.admin'),
 
                           ('files', 'files', 'repository.read'), ])
 
    def test_api_get_repo_nodes_by_regular_user(self, name, ret_type, grant_perm):
 
        RepoModel().grant_user_permission(repo=self.REPO,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm=grant_perm)
 
        Session().commit()
 

	
 
        rev = 'tip'
 
        path = '/'
 
        id_, params = _build_data(self.apikey_regular, 'get_repo_nodes',
 
                                  repoid=self.REPO, revision=rev,
 
                                  root_path=path,
 
                                  ret_type=ret_type)
 
        response = api_call(self, params)
 

	
 
        # we don't check the actual return types here, since they are tested
        # elsewhere
 
        expected = response.json['result']
 
        try:
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            RepoModel().revoke_user_permission(self.REPO, self.TEST_USER_LOGIN)
 

	
 
    def test_api_create_repo(self):
 
        repo_name = 'api-repo'
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  owner=TEST_USER_ADMIN_LOGIN,
 
                                  repo_type=self.REPO_TYPE,
 
        )
 
        response = api_call(self, params)
 

	
 
        repo = RepoModel().get_by_repo_name(repo_name)
 
        self.assertNotEqual(repo, None)
 
        ret = {
 
            'msg': 'Created new repository `%s`' % repo_name,
 
            'success': True,
 
            'task': None,
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_repo(repo_name)
 

	
 
    def test_api_create_repo_and_repo_group(self):
 
        repo_name = 'my_gr/api-repo'
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  owner=TEST_USER_ADMIN_LOGIN,
 
                                  repo_type=self.REPO_TYPE,)
 
        response = api_call(self, params)
 
        repo = RepoModel().get_by_repo_name(repo_name)
 
        self.assertNotEqual(repo, None)
 
        ret = {
 
            'msg': 'Created new repository `%s`' % repo_name,
 
            'success': True,
 
            'task': None,
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_repo(repo_name)
 
        fixture.destroy_repo_group('my_gr')
 

	
 
    def test_api_create_repo_in_repo_group_without_permission(self):
 
        repo_group_name = '%s/api-repo-repo' % TEST_REPO_GROUP
 
        repo_name = '%s/api-repo' % repo_group_name
 

	
 
        rg = fixture.create_repo_group(repo_group_name)
 
        Session().commit()
 
        RepoGroupModel().grant_user_permission(repo_group_name,
 
                                               self.TEST_USER_LOGIN,
 
                                               'group.none')
 
        Session().commit()
 

	
 
        id_, params = _build_data(self.apikey_regular, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  repo_type=self.REPO_TYPE,
 
        )
 
        response = api_call(self, params)
 

	
 
        # Current result, while API access control still differs from the web interface:
 
        ret = {
 
            'msg': 'Created new repository `%s`' % repo_name,
 
            'success': True,
 
            'task': None,
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_repo(repo_name)
 

	
 
        # Expected and arguably more correct result:
 
        #expected = 'failed to create repository `%s`' % repo_name
 
        #self._compare_error(id_, expected, given=response.body)
 

	
 
        fixture.destroy_repo_group(repo_group_name)
 

	
 
    def test_api_create_repo_unknown_owner(self):
 
        repo_name = 'api-repo'
 
        owner = 'i-dont-exist'
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  owner=owner,
 
                                  repo_type=self.REPO_TYPE,
 
        )
 
        response = api_call(self, params)
 
        expected = 'user `%s` does not exist' % owner
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_create_repo_dont_specify_owner(self):
 
        repo_name = 'api-repo'
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  repo_type=self.REPO_TYPE,
 
        )
 
        response = api_call(self, params)
 

	
 
        repo = RepoModel().get_by_repo_name(repo_name)
 
        self.assertNotEqual(repo, None)
 
        ret = {
 
            'msg': 'Created new repository `%s`' % repo_name,
 
            'success': True,
 
            'task': None,
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_repo(repo_name)
 

	
 
    def test_api_create_repo_by_non_admin(self):
 
        repo_name = 'api-repo'
 
        id_, params = _build_data(self.apikey_regular, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  repo_type=self.REPO_TYPE,
 
        )
 
        response = api_call(self, params)
 

	
 
        repo = RepoModel().get_by_repo_name(repo_name)
 
        self.assertNotEqual(repo, None)
 
        ret = {
 
            'msg': 'Created new repository `%s`' % repo_name,
 
            'success': True,
 
            'task': None,
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_repo(repo_name)
 

	
 
    def test_api_create_repo_by_non_admin_specify_owner(self):
 
        repo_name = 'api-repo'
 
        owner = 'i-dont-exist'
 
        id_, params = _build_data(self.apikey_regular, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  repo_type=self.REPO_TYPE,
 
                                  owner=owner)
 
        response = api_call(self, params)
 

	
 
        expected = 'Only Kallithea admin can specify `owner` param'
 
        self._compare_error(id_, expected, given=response.body)
 
        fixture.destroy_repo(repo_name)
 

	
 
    def test_api_create_repo_exists(self):
 
        repo_name = self.REPO
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  owner=TEST_USER_ADMIN_LOGIN,
 
                                  repo_type=self.REPO_TYPE,)
 
        response = api_call(self, params)
 
        expected = "repo `%s` already exist" % repo_name
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(RepoModel, 'create', crash)
 
    def test_api_create_repo_exception_occurred(self):
 
        repo_name = 'api-repo'
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  owner=TEST_USER_ADMIN_LOGIN,
 
                                  repo_type=self.REPO_TYPE,)
 
        response = api_call(self, params)
 
        expected = 'failed to create repository `%s`' % repo_name
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @parameterized.expand([
 
        ('owner', {'owner': TEST_USER_REGULAR_LOGIN}),
 
        ('description', {'description': 'new description'}),
 
        ('active', {'active': True}),
 
        ('active', {'active': False}),
 
        ('clone_uri', {'clone_uri': 'http://foo.com/repo'}),
 
        ('clone_uri', {'clone_uri': None}),
 
        ('landing_rev', {'landing_rev': 'branch:master'}),
 
        ('enable_statistics', {'enable_statistics': True}),
 
        ('enable_locking', {'enable_locking': True}),
 
        ('enable_downloads', {'enable_downloads': True}),
 
        ('name', {'name': 'new_repo_name'}),
 
        ('repo_group', {'group': 'test_group_for_update'}),
 
    ])
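    # As with update_user, each pair above drives a separate 'update_repo'
    # call; the 'name' and 'repo_group' cases recompute repo_name so the
    # finally-block can destroy the repository at its new location.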
 
    def test_api_update_repo(self, changing_attr, updates):
 
        repo_name = 'api_update_me'
 
        repo = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        if changing_attr == 'repo_group':
 
            fixture.create_repo_group(updates['group'])
 

	
 
        id_, params = _build_data(self.apikey, 'update_repo',
 
                                  repoid=repo_name, **updates)
 
        response = api_call(self, params)
 
        if changing_attr == 'name':
 
            repo_name = updates['name']
 
        if changing_attr == 'repo_group':
 
            repo_name = '/'.join([updates['group'], repo_name])
 
        try:
 
            expected = {
 
                'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
 
                'repository': repo.get_api_data()
 
            }
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 
            if changing_attr == 'repo_group':
 
                fixture.destroy_repo_group(updates['group'])
 

	
 
    def test_api_update_repo_repo_group_does_not_exist(self):
 
        repo_name = 'admin_owned'
 
        fixture.create_repo(repo_name)
 
        updates = {'group': 'test_group_for_update'}
 
        id_, params = _build_data(self.apikey, 'update_repo',
 
                                  repoid=repo_name, **updates)
 
        response = api_call(self, params)
 
        try:
 
            expected = 'repository group `%s` does not exist' % updates['group']
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_update_repo_regular_user_not_allowed(self):
 
        repo_name = 'admin_owned'
 
        fixture.create_repo(repo_name)
 
        updates = {'active': False}
 
        id_, params = _build_data(self.apikey_regular, 'update_repo',
 
                                  repoid=repo_name, **updates)
 
        response = api_call(self, params)
 
        try:
 
            expected = 'repository `%s` does not exist' % repo_name
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    @mock.patch.object(RepoModel, 'update', crash)
 
    def test_api_update_repo_exception_occurred(self):
 
        repo_name = 'api_update_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        id_, params = _build_data(self.apikey, 'update_repo',
 
                                  repoid=repo_name, owner=TEST_USER_ADMIN_LOGIN,)
 
        response = api_call(self, params)
 
        try:
 
            expected = 'failed to update repo `%s`' % repo_name
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_update_repo_regular_user_change_repo_name(self):
 
        repo_name = 'admin_owned'
 
        new_repo_name = 'new_repo_name'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        RepoModel().grant_user_permission(repo=repo_name,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm='repository.admin')
 
        UserModel().revoke_perm('default', 'hg.create.repository')
 
        UserModel().grant_perm('default', 'hg.create.none')
 
        updates = {'name': new_repo_name}
 
        id_, params = _build_data(self.apikey_regular, 'update_repo',
 
                                  repoid=repo_name, **updates)
 
        response = api_call(self, params)
 
        try:
 
            expected = 'no permission to create (or move) repositories'
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 
            fixture.destroy_repo(new_repo_name)
 

	
 
    def test_api_update_repo_regular_user_change_repo_name_allowed(self):
 
        repo_name = 'admin_owned'
 
        new_repo_name = 'new_repo_name'
 
        repo = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        RepoModel().grant_user_permission(repo=repo_name,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm='repository.admin')
 
        UserModel().revoke_perm('default', 'hg.create.none')
 
        UserModel().grant_perm('default', 'hg.create.repository')
 
        updates = {'name': new_repo_name}
 
        id_, params = _build_data(self.apikey_regular, 'update_repo',
 
                                  repoid=repo_name, **updates)
 
        response = api_call(self, params)
 
        try:
 
            expected = {
 
                'msg': 'updated repo ID:%s %s' % (repo.repo_id, new_repo_name),
 
                'repository': repo.get_api_data()
 
            }
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 
            fixture.destroy_repo(new_repo_name)
 

	
 
    def test_api_update_repo_regular_user_change_owner(self):
 
        repo_name = 'admin_owned'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        RepoModel().grant_user_permission(repo=repo_name,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm='repository.admin')
 
        updates = {'owner': TEST_USER_ADMIN_LOGIN}
 
        id_, params = _build_data(self.apikey_regular, 'update_repo',
 
                                  repoid=repo_name, **updates)
 
        response = api_call(self, params)
 
        try:
 
            expected = 'Only Kallithea admin can specify `owner` param'
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_delete_repo(self):
 
        repo_name = 'api_delete_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 

	
 
        id_, params = _build_data(self.apikey, 'delete_repo',
 
                                  repoid=repo_name, )
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'Deleted repository `%s`' % repo_name,
 
            'success': True
 
        }
 
        try:
 
            expected = ret
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_delete_repo_by_non_admin(self):
 
        repo_name = 'api_delete_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE,
 
                            cur_user=self.TEST_USER_LOGIN)
 
        id_, params = _build_data(self.apikey_regular, 'delete_repo',
 
                                  repoid=repo_name, )
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'Deleted repository `%s`' % repo_name,
 
            'success': True
 
        }
 
        try:
 
            expected = ret
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_delete_repo_by_non_admin_no_permission(self):
 
        repo_name = 'api_delete_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        try:
 
            id_, params = _build_data(self.apikey_regular, 'delete_repo',
 
                                      repoid=repo_name, )
 
            response = api_call(self, params)
 
            expected = 'repository `%s` does not exist' % (repo_name)
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_delete_repo_exception_occurred(self):
 
        repo_name = 'api_delete_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        try:
 
            with mock.patch.object(RepoModel, 'delete', crash):
 
                id_, params = _build_data(self.apikey, 'delete_repo',
 
                                          repoid=repo_name, )
 
                response = api_call(self, params)
 

	
 
                expected = 'failed to delete repository `%s`' % repo_name
 
                self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_fork_repo(self):
 
        fork_name = 'api-repo-fork'
 
        id_, params = _build_data(self.apikey, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
                                  owner=TEST_USER_ADMIN_LOGIN,
 
        )
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'Created fork of `%s` as `%s`' % (self.REPO,
 
                                                     fork_name),
 
            'success': True,
 
            'task': None,
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_repo(fork_name)
 

	
 
    def test_api_fork_repo_non_admin(self):
 
        fork_name = 'api-repo-fork'
 
        id_, params = _build_data(self.apikey_regular, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
        )
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'Created fork of `%s` as `%s`' % (self.REPO,
 
                                                     fork_name),
 
            'success': True,
 
            'task': None,
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_repo(fork_name)
 

	
 
    def test_api_fork_repo_non_admin_specify_owner(self):
 
        fork_name = 'api-repo-fork'
 
        id_, params = _build_data(self.apikey_regular, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
                                  owner=TEST_USER_ADMIN_LOGIN,
 
        )
 
        response = api_call(self, params)
 
        expected = 'Only Kallithea admin can specify `owner` param'
 
        self._compare_error(id_, expected, given=response.body)
 
        fixture.destroy_repo(fork_name)
 

	
 
    def test_api_fork_repo_non_admin_no_permission_to_fork(self):
 
        RepoModel().grant_user_permission(repo=self.REPO,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm='repository.none')
 
        fork_name = 'api-repo-fork'
 
        id_, params = _build_data(self.apikey_regular, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
        )
 
        response = api_call(self, params)
 
        expected = 'repository `%s` does not exist' % (self.REPO)
 
        self._compare_error(id_, expected, given=response.body)
 
        fixture.destroy_repo(fork_name)
 

	
 
    @parameterized.expand([('read', 'repository.read'),
 
                           ('write', 'repository.write'),
 
                           ('admin', 'repository.admin')])
 
    def test_api_fork_repo_non_admin_no_create_repo_permission(self, name, perm):
 
        fork_name = 'api-repo-fork'
 
        # regardless of base repository permission, forking is disallowed
 
        # when repository creation is disabled
 
        RepoModel().grant_user_permission(repo=self.REPO,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm=perm)
 
        UserModel().revoke_perm('default', 'hg.create.repository')
 
        UserModel().grant_perm('default', 'hg.create.none')
 
        id_, params = _build_data(self.apikey_regular, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
        )
 
        response = api_call(self, params)
 
        expected = 'no permission to create repositories'
 
        self._compare_error(id_, expected, given=response.body)
 
        fixture.destroy_repo(fork_name)
 

	
 
    def test_api_fork_repo_unknown_owner(self):
 
        fork_name = 'api-repo-fork'
 
        owner = 'i-dont-exist'
 
        id_, params = _build_data(self.apikey, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
                                  owner=owner,
 
        )
 
        response = api_call(self, params)
 
        expected = 'user `%s` does not exist' % owner
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_fork_repo_fork_exists(self):
 
        fork_name = 'api-repo-fork'
 
        fixture.create_fork(self.REPO, fork_name)
 

	
 
        try:
 
            fork_name = 'api-repo-fork'
 

	
 
            id_, params = _build_data(self.apikey, 'fork_repo',
 
                                      repoid=self.REPO,
 
                                      fork_name=fork_name,
 
                                      owner=TEST_USER_ADMIN_LOGIN,
 
            )
 
            response = api_call(self, params)
 

	
 
            expected = "fork `%s` already exist" % fork_name
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(fork_name)
 

	
 
    def test_api_fork_repo_repo_exists(self):
 
        fork_name = self.REPO
 

	
 
        id_, params = _build_data(self.apikey, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
                                  owner=TEST_USER_ADMIN_LOGIN,
 
        )
 
        response = api_call(self, params)
 

	
 
        expected = "repo `%s` already exist" % fork_name
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(RepoModel, 'create_fork', crash)
 
    def test_api_fork_repo_exception_occurred(self):
 
        fork_name = 'api-repo-fork'
 
        id_, params = _build_data(self.apikey, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
                                  owner=TEST_USER_ADMIN_LOGIN,
 
        )
 
        response = api_call(self, params)
 

	
 
        expected = 'failed to fork repository `%s` as `%s`' % (self.REPO,
 
                                                               fork_name)
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_group(self):
 
        id_, params = _build_data(self.apikey, 'get_user_group',
 
                                  usergroupid=TEST_USER_GROUP)
 
        response = api_call(self, params)
 

	
 
        user_group = UserGroupModel().get_group(TEST_USER_GROUP)
 
        members = []
 
        for user in user_group.members:
 
            user = user.user
 
            members.append(user.get_api_data())
 

	
 
        ret = user_group.get_api_data()
 
        ret['members'] = members
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_groups(self):
 
        gr_name = 'test_user_group2'
 
        make_user_group(gr_name)
 

	
 
        id_, params = _build_data(self.apikey, 'get_user_groups', )
 
        response = api_call(self, params)
 

	
 
        try:
 
            expected = []
 
            for name in [TEST_USER_GROUP, 'test_user_group2']:
 
                user_group = UserGroupModel().get_group(name)
 
                ret = user_group.get_api_data()
 
                expected.append(ret)
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user_group(gr_name)
 

	
 
    def test_api_create_user_group(self):
 
        group_name = 'some_new_group'
 
        id_, params = _build_data(self.apikey, 'create_user_group',
 
                                  group_name=group_name)
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'created new user group `%s`' % group_name,
 
            'user_group': jsonify(UserGroupModel()
 
                .get_by_name(group_name)
 
                .get_api_data())
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
        fixture.destroy_user_group(group_name)
 

	
 
    def test_api_create_user_group_that_exists(self):
 
        id_, params = _build_data(self.apikey, 'create_user_group',
 
                                  group_name=TEST_USER_GROUP)
 
        response = api_call(self, params)
 

	
 
        expected = "user group `%s` already exist" % TEST_USER_GROUP
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(UserGroupModel, 'create', crash)
 
    def test_api_create_user_group_exception_occurred(self):
 
        group_name = 'exception_happens'
 
        id_, params = _build_data(self.apikey, 'create_user_group',
 
                                  group_name=group_name)
 
        response = api_call(self, params)
 

	
 
        expected = 'failed to create group `%s`' % group_name
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @parameterized.expand([('group_name', {'group_name': 'new_group_name'}),
 
                           ('group_name', {'group_name': 'test_group_for_update'}),
 
                           ('owner', {'owner': TEST_USER_REGULAR_LOGIN}),
 
                           ('active', {'active': False}),
 
                           ('active', {'active': True})])
 
    def test_api_update_user_group(self, changing_attr, updates):
 
        gr_name = 'test_group_for_update'
 
        user_group = fixture.create_user_group(gr_name)
 
        id_, params = _build_data(self.apikey, 'update_user_group',
 
                                  usergroupid=gr_name, **updates)
 
        response = api_call(self, params)
 
        try:
 
            expected = {
kallithea/tests/functional/test_admin.py
 
from __future__ import with_statement
 
import os
 
import csv
 
import datetime
 
from kallithea.tests import *
 
from kallithea.model.db import UserLog
 
from kallithea.model.meta import Session
 
from kallithea.lib.utils2 import safe_unicode
 

	
 
dn = os.path.dirname
 
FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'fixtures')
 

	
 

	
 
class TestAdminController(TestController):
 

	
 
    @classmethod
 
    def setup_class(cls):
 
        UserLog.query().delete()
 
        Session().commit()
 

	
 
        def strptime(val):
 
            fmt = '%Y-%m-%d %H:%M:%S'
 
            if '.' not in val:
 
                return datetime.datetime.strptime(val, fmt)
 

	
 
            nofrag, frag = val.split(".")
 
            date = datetime.datetime.strptime(nofrag, fmt)
 

	
 
            frag = frag[:6]  # truncate to microseconds
 
            frag += (6 - len(frag)) * '0'  # add 0s
 
            return date.replace(microsecond=int(frag))
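 
        # Illustrative sanity checks for the helper above (added example):
 
        # the fractional part is truncated/zero-padded to microseconds.
 
        assert strptime('2012-10-10 12:00:00.5').microsecond == 500000
 
        assert strptime('2012-10-10 12:00:00.1234567').microsecond == 123456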
 

	
 
        with open(os.path.join(FIXTURES, 'journal_dump.csv')) as f:
 
            for row in csv.DictReader(f):
 
                ul = UserLog()
 
                for k, v in row.iteritems():
 
                    v = safe_unicode(v)
 
                    if k == 'action_date':
 
                        v = strptime(v)
 
                    if k in ['user_id', 'repository_id']:
 
                        # nullable due to FK problems
 
                        v = None
 
                    setattr(ul, k, v)
 
                Session().add(ul)
 
            Session().commit()
 

	
 
    @classmethod
 
    def teardown_class(cls):
 
        UserLog.query().delete()
 
        Session().commit()
 

	
 
    def test_index(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='admin/admin', action='index'))
 
        response.mustcontain('Admin Journal')
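 
    # The journal filter syntax exercised below supports field prefixes
 
    # (repository:, username:, action:, date:), '*' wildcards, and the
 
    # boolean operators AND / OR.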
 

	
 
    def test_filter_all_entries(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='admin/admin', action='index',))
 
        response.mustcontain('2034 Entries')
 

	
 
    def test_filter_journal_filter_exact_match_on_repository(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='admin/admin', action='index',
 
                                    filter='repository:xxx'))
 
        response.mustcontain('3 Entries')
 

	
 
    def test_filter_journal_filter_exact_match_on_repository_CamelCase(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='admin/admin', action='index',
 
                                    filter='repository:XxX'))
 
        response.mustcontain('3 Entries')
 

	
 
    def test_filter_journal_filter_wildcard_on_repository(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='admin/admin', action='index',
 
                                    filter='repository:*test*'))
 
        response.mustcontain('862 Entries')
 

	
 
    def test_filter_journal_filter_prefix_on_repository(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='admin/admin', action='index',
 
                                    filter='repository:test*'))
 
        response.mustcontain('257 Entries')
 

	
 
    def test_filter_journal_filter_prefix_on_repository_CamelCase(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='admin/admin', action='index',
 
                                    filter='repository:Test*'))
 
        response.mustcontain('257 Entries')
 

	
 
    def test_filter_journal_filter_prefix_on_repository_and_user(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='admin/admin', action='index',
 
                                    filter='repository:test* AND username:demo'))
 
        response.mustcontain('130 Entries')
 

	
 
    def test_filter_journal_filter_prefix_on_repository_or_other_repo(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='admin/admin', action='index',
 
                                    filter='repository:test* OR repository:xxx'))
 
        response.mustcontain('260 Entries')  # 257 + 3
 

	
 
    def test_filter_journal_filter_exact_match_on_username(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='admin/admin', action='index',
 
                                    filter='username:demo'))
 
        response.mustcontain('1087 Entries')
 

	
 
    def test_filter_journal_filter_exact_match_on_username_camelCase(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='admin/admin', action='index',
 
                                    filter='username:DemO'))
 
        response.mustcontain('1087 Entries')
 

	
 
    def test_filter_journal_filter_wildcard_on_username(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='admin/admin', action='index',
 
                                    filter='username:*test*'))
 
        response.mustcontain('100 Entries')
 

	
 
    def test_filter_journal_filter_prefix_on_username(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='admin/admin', action='index',
 
                                    filter='username:demo*'))
 
        response.mustcontain('1101 Entries')
 

	
 
    def test_filter_journal_filter_prefix_on_user_or_other_user(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='admin/admin', action='index',
 
                                    filter='username:demo OR username:volcan'))
 
        response.mustcontain('1095 Entries')  # 1087 + 8
 

	
 
    def test_filter_journal_filter_wildcard_on_action(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='admin/admin', action='index',
 
                                    filter='action:*pull_request*'))
 
        response.mustcontain('187 Entries')
 

	
 
    def test_filter_journal_filter_on_date(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='admin/admin', action='index',
 
                                    filter='date:20121010'))
 
        response.mustcontain('47 Entries')
 

	
 
    def test_filter_journal_filter_on_date_2(self):
 
        self.log_user()
 
        response = self.app.get(url(controller='admin/admin', action='index',
 
                                    filter='date:20121020'))
 
        response.mustcontain('17 Entries')
kallithea/tests/functional/test_login.py
 
# -*- coding: utf-8 -*-
 
from __future__ import with_statement
 
import re
 
import time
 

	
 
import mock
 

	
 
from kallithea.tests import *
 
from kallithea.tests.fixture import Fixture
 
from kallithea.lib.utils2 import generate_api_key
 
from kallithea.lib.auth import check_password
 
from kallithea.lib import helpers as h
 
from kallithea.model.api_key import ApiKeyModel
 
from kallithea.model import validators
 
from kallithea.model.db import User, Notification
 
from kallithea.model.meta import Session
 
from kallithea.model.user import UserModel
 

	
 
fixture = Fixture()
 

	
 

	
 
class TestLoginController(TestController):
 
    def setUp(self):
 
        self.remove_all_notifications()
 
        self.assertEqual(Notification.query().all(), [])
 

	
 
    def test_index(self):
 
        response = self.app.get(url(controller='login', action='index'))
 
        self.assertEqual(response.status, '200 OK')
 
        # Test response...
 

	
 
    def test_login_admin_ok(self):
 
        response = self.app.post(url(controller='login', action='index'),
 
                                 {'username': TEST_USER_ADMIN_LOGIN,
 
                                  'password': TEST_USER_ADMIN_PASS})
 
        self.assertEqual(response.status, '302 Found')
 
        self.assert_authenticated_user(response, TEST_USER_ADMIN_LOGIN)
 

	
 
        response = response.follow()
 
        response.mustcontain('/%s' % HG_REPO)
 

	
 
    def test_login_regular_ok(self):
 
        response = self.app.post(url(controller='login', action='index'),
 
                                 {'username': TEST_USER_REGULAR_LOGIN,
 
                                  'password': TEST_USER_REGULAR_PASS})
 

	
 
        self.assertEqual(response.status, '302 Found')
 
        self.assert_authenticated_user(response, TEST_USER_REGULAR_LOGIN)
 

	
 
        response = response.follow()
 
        response.mustcontain('/%s' % HG_REPO)
 

	
 
    def test_login_ok_came_from(self):
 
        test_came_from = '/_admin/users'
 
        response = self.app.post(url(controller='login', action='index',
 
                                     came_from=test_came_from),
 
                                 {'username': TEST_USER_ADMIN_LOGIN,
 
                                  'password': TEST_USER_ADMIN_PASS})
 
        self.assertEqual(response.status, '302 Found')
 
        response = response.follow()
 

	
 
        self.assertEqual(response.status, '200 OK')
 
        response.mustcontain('Users Administration')
 

	
 
    def test_login_do_not_remember(self):
 
        response = self.app.post(url(controller='login', action='index'),
 
                                 {'username': TEST_USER_REGULAR_LOGIN,
 
                                  'password': TEST_USER_REGULAR_PASS,
 
                                  'remember': False})
 

	
 
        self.assertIn('Set-Cookie', response.headers)
 
        for cookie in response.headers.getall('Set-Cookie'):
 
            self.assertFalse(re.search(r';\s+(Max-Age|Expires)=', cookie, re.IGNORECASE),
 
                'Cookie %r has expiration date, but should be a session cookie' % cookie)
 

	
 
    def test_login_remember(self):
 
        response = self.app.post(url(controller='login', action='index'),
 
                                 {'username': TEST_USER_REGULAR_LOGIN,
 
                                  'password': TEST_USER_REGULAR_PASS,
 
                                  'remember': True})
 

	
 
        self.assertIn('Set-Cookie', response.headers)
 
        for cookie in response.headers.getall('Set-Cookie'):
 
            self.assertTrue(re.search(r';\s+(Max-Age|Expires)=', cookie, re.IGNORECASE),
 
                'Cookie %r should have expiration date, but is a session cookie' % cookie)
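 
    # For reference, the two tests above distinguish a session cookie, which
 
    # carries no explicit lifetime (e.g. 'Set-Cookie: kallithea=abc; Path=/'),
 
    # from a persistent one (e.g. '...; Path=/; Max-Age=2592000') -- the
 
    # example cookie values are illustrative.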
 

	
 
    def test_logout(self):
 
        response = self.app.post(url(controller='login', action='index'),
 
                                 {'username': TEST_USER_REGULAR_LOGIN,
 
                                  'password': TEST_USER_REGULAR_PASS})
 

	
 
        # Verify that a login session has been established.
 
        response = self.app.get(url(controller='login', action='index'))
 
        response = response.follow()
 
        self.assertIn('authuser', response.session)
 

	
 
        response.click('Log Out')
 

	
 
        # Verify that the login session has been terminated.
 
        response = self.app.get(url(controller='login', action='index'))
 
        self.assertNotIn('authuser', response.session)
 

	
 
    @parameterized.expand([
 
          ('data:text/html,<script>window.alert("xss")</script>',),
 
          ('mailto:test@example.com',),
 
          ('file:///etc/passwd',),
 
          ('ftp://some.ftp.server',),
 
          ('http://other.domain/bl%C3%A5b%C3%A6rgr%C3%B8d',),
 
    ])
 
    def test_login_bad_came_froms(self, url_came_from):
 
        response = self.app.post(url(controller='login', action='index',
 
                                     came_from=url_came_from),
 
                                 {'username': TEST_USER_ADMIN_LOGIN,
 
                                  'password': TEST_USER_ADMIN_PASS})
 
        self.assertEqual(response.status, '302 Found')
 
        self.assertEqual(response._environ['paste.testing_variables']
 
                         ['tmpl_context'].came_from, '/')
 
        response = response.follow()
 

	
 
        self.assertEqual(response.status, '200 OK')
 

	
 
    def test_login_short_password(self):
 
        response = self.app.post(url(controller='login', action='index'),
 
                                 {'username': TEST_USER_ADMIN_LOGIN,
 
                                  'password': 'as'})
 
        self.assertEqual(response.status, '200 OK')
 

	
 
        response.mustcontain('Enter 3 characters or more')
 

	
 
    def test_login_wrong_username_password(self):
 
        response = self.app.post(url(controller='login', action='index'),
 
                                 {'username': 'error',
 
                                  'password': 'test12'})
 

	
 
        response.mustcontain('Invalid username or password')
 

	
 
    # Verify that GET arguments are correctly preserved across login redirections.
 

	
 
    @parameterized.expand([
 
        ({'foo':'one', 'bar':'two'}, ('foo=one', 'bar=two')),
 
        ({'blue': u'blå'.encode('utf-8'), 'green':u'grøn'},
 
             ('blue=bl%C3%A5', 'green=gr%C3%B8n')),
 
    ])
 
    def test_redirection_to_login_form_preserves_get_args(self, args, args_encoded):
 
        with fixture.anon_access(False):
 
            response = self.app.get(url(controller='summary', action='index',
 
                                        repo_name=HG_REPO,
 
                                        **args))
 
            self.assertEqual(response.status, '302 Found')
 
            for encoded in args_encoded:
 
                self.assertIn(encoded, response.location)
 

	
 
    @parameterized.expand([
 
        ({'foo':'one', 'bar':'two'}, ('foo=one', 'bar=two')),
 
        ({'blue': u'blå'.encode('utf-8'), 'green':u'grøn'},
 
             ('blue=bl%C3%A5', 'green=gr%C3%B8n')),
 
    ])
 
    def test_login_form_preserves_get_args(self, args, args_encoded):
 
        response = self.app.get(url(controller='login', action='index',
 
                                    came_from = '/_admin/users',
 
                                    **args))
 
        for encoded in args_encoded:
 
            self.assertIn(encoded, response.form.action)
 

	
 
    @parameterized.expand([
 
        ({'foo':'one', 'bar':'two'}, ('foo=one', 'bar=two')),
 
        ({'blue': u'blå'.encode('utf-8'), 'green':u'grøn'},
 
             ('blue=bl%C3%A5', 'green=gr%C3%B8n')),
 
    ])
 
    def test_redirection_after_successful_login_preserves_get_args(self, args, args_encoded):
 
        response = self.app.post(url(controller='login', action='index',
 
                                     came_from = '/_admin/users',
 
                                     **args),
 
                                 {'username': TEST_USER_ADMIN_LOGIN,
 
                                  'password': TEST_USER_ADMIN_PASS})
 
        self.assertEqual(response.status, '302 Found')
 
        for encoded in args_encoded:
 
            self.assertIn(encoded, response.location)
 

	
 
    @parameterized.expand([
 
        ({'foo':'one', 'bar':'two'}, ('foo=one', 'bar=two')),
 
        ({'blue': u'blå'.encode('utf-8'), 'green':u'grøn'},
 
             ('blue=bl%C3%A5', 'green=gr%C3%B8n')),
 
    ])
 
    def test_login_form_after_incorrect_login_preserves_get_args(self, args, args_encoded):
 
        response = self.app.post(url(controller='login', action='index',
 
                                     came_from = '/_admin/users',
 
                                     **args),
 
                                 {'username': 'error',
 
                                  'password': 'test12'})
 

	
 
        response.mustcontain('Invalid username or password')
 
        for encoded in args_encoded:
 
            self.assertIn(encoded, response.form.action)
 

	
 
    #==========================================================================
 
    # REGISTRATIONS
 
    #==========================================================================
 
    def test_register(self):
 
        response = self.app.get(url(controller='login', action='register'))
 
        response.mustcontain('Sign Up')
 

	
 
    def test_register_err_same_username(self):
 
        uname = TEST_USER_ADMIN_LOGIN
 
        response = self.app.post(url(controller='login', action='register'),
 
                                            {'username': uname,
 
                                             'password': 'test12',
 
                                             'password_confirmation': 'test12',
 
                                             'email': 'goodmail@domain.com',
 
                                             'firstname': 'test',
 
                                             'lastname': 'test'})
 

	
 
        msg = validators.ValidUsername()._messages['username_exists']
 
        msg = h.html_escape(msg % {'username': uname})
 
        response.mustcontain(msg)
 

	
 
    def test_register_err_same_email(self):
 
        response = self.app.post(url(controller='login', action='register'),
 
                                            {'username': 'test_admin_0',
 
                                             'password': 'test12',
 
                                             'password_confirmation': 'test12',
 
                                             'email': TEST_USER_ADMIN_EMAIL,
 
                                             'firstname': 'test',
 
                                             'lastname': 'test'})
 

	
 
        msg = validators.UniqSystemEmail()()._messages['email_taken']
 
        response.mustcontain(msg)
 

	
 
    def test_register_err_same_email_case_sensitive(self):
 
        response = self.app.post(url(controller='login', action='register'),
 
                                            {'username': 'test_admin_1',
 
                                             'password': 'test12',
 
                                             'password_confirmation': 'test12',
 
                                             'email': TEST_USER_ADMIN_EMAIL.title(),
 
                                             'firstname': 'test',
 
                                             'lastname': 'test'})
 
        msg = validators.UniqSystemEmail()()._messages['email_taken']
 
        response.mustcontain(msg)
 

	
 
    def test_register_err_wrong_data(self):
 
        response = self.app.post(url(controller='login', action='register'),
 
                                            {'username': 'xs',
 
                                             'password': 'test',
 
                                             'password_confirmation': 'test',
 
                                             'email': 'goodmailm',
 
                                             'firstname': 'test',
 
                                             'lastname': 'test'})
 
        self.assertEqual(response.status, '200 OK')
 
        response.mustcontain('An email address must contain a single @')
 
        response.mustcontain('Enter a value 6 characters long or more')
 

	
 
    def test_register_err_username(self):
 
        response = self.app.post(url(controller='login', action='register'),
 
                                            {'username': 'error user',
 
                                             'password': 'test12',
 
                                             'password_confirmation': 'test12',
 
                                             'email': 'goodmailm',
 
                                             'firstname': 'test',
 
                                             'lastname': 'test'})
 

	
 
        response.mustcontain('An email address must contain a single @')
 
        response.mustcontain('Username may only contain '
 
                'alphanumeric characters underscores, '
 
                'periods or dashes and must begin with an '
 
                'alphanumeric character')
 

	
 
    def test_register_err_case_sensitive(self):
 
        usr = TEST_USER_ADMIN_LOGIN.title()
 
        response = self.app.post(url(controller='login', action='register'),
 
                                            {'username': usr,
 
                                             'password': 'test12',
 
                                             'password_confirmation': 'test12',
 
                                             'email': 'goodmailm',
 
                                             'firstname': 'test',
 
                                             'lastname': 'test'})
 

	
 
        response.mustcontain('An email address must contain a single @')
 
        msg = validators.ValidUsername()._messages['username_exists']
 
        msg = h.html_escape(msg % {'username': usr})
 
        response.mustcontain(msg)
 

	
 
    def test_register_special_chars(self):
 
        response = self.app.post(url(controller='login', action='register'),
 
                                        {'username': 'xxxaxn',
 
                                         'password': 'ąćźżąśśśś',
 
                                         'password_confirmation': 'ąćźżąśśśś',
 
                                         'email': 'goodmailm@test.plx',
 
                                         'firstname': 'test',
 
                                         'lastname': 'test'})
 

	
 
        msg = validators.ValidPassword()._messages['invalid_password']
 
        response.mustcontain(msg)
 

	
 
    def test_register_password_mismatch(self):
 
        response = self.app.post(url(controller='login', action='register'),
 
                                            {'username': 'xs',
 
                                             'password': '123qwe',
 
                                             'password_confirmation': 'qwe123',
 
                                             'email': 'goodmailm@test.plxa',
 
                                             'firstname': 'test',
 
                                             'lastname': 'test'})
 
        msg = validators.ValidPasswordsMatch('password', 'password_confirmation')._messages['password_mismatch']
 
        response.mustcontain(msg)
 

	
 
    def test_register_ok(self):
 
        username = 'test_regular4'
 
        password = 'qweqwe'
 
        email = 'username@test.com'
 
        name = 'testname'
 
        lastname = 'testlastname'
 

	
 
        response = self.app.post(url(controller='login', action='register'),
 
                                            {'username': username,
 
                                             'password': password,
 
                                             'password_confirmation': password,
 
                                             'email': email,
 
                                             'firstname': name,
 
                                             'lastname': lastname,
 
                                             'admin': True})  # This should be overridden
 
        self.assertEqual(response.status, '302 Found')
 
        self.checkSessionFlash(response, 'You have successfully registered into Kallithea')
 

	
 
        ret = Session().query(User).filter(User.username == 'test_regular4').one()
 
        self.assertEqual(ret.username, username)
 
        self.assertEqual(check_password(password, ret.password), True)
 
        self.assertEqual(ret.email, email)
 
        self.assertEqual(ret.name, name)
 
        self.assertEqual(ret.lastname, lastname)
 
        self.assertNotEqual(ret.api_key, None)
 
        self.assertEqual(ret.admin, False)
 

	
 
    #==========================================================================
 
    # PASSWORD RESET
 
    #==========================================================================
 

	
 
    def test_forgot_password_wrong_mail(self):
 
        bad_email = 'username%wrongmail.org'
 
        response = self.app.post(
 
                        url(controller='login', action='password_reset'),
 
                            {'email': bad_email, }
 
        )
 

	
 
        response.mustcontain('An email address must contain a single @')
 

	
 
    def test_forgot_password(self):
 
        response = self.app.get(url(controller='login',
 
                                    action='password_reset'))
 
        self.assertEqual(response.status, '200 OK')
 

	
 
        username = 'test_password_reset_1'
 
        password = 'qweqwe'
 
        email = 'username@python-works.com'
 
        name = 'passwd'
 
        lastname = 'reset'
 
        timestamp = int(time.time())
 

	
 
        new = User()
 
        new.username = username
 
        new.password = password
 
        new.email = email
 
        new.name = name
 
        new.lastname = lastname
 
        new.api_key = generate_api_key()
 
        Session().add(new)
 
        Session().commit()
 

	
 
        response = self.app.post(url(controller='login',
 
                                     action='password_reset'),
 
                                 {'email': email, })
 

	
 
        self.checkSessionFlash(response, 'A password reset confirmation code has been sent')
 

	
 
        response = response.follow()
 

	
 
        # BAD TOKEN
 

	
 
        token = "bad"
 

	
 
        response = self.app.post(url(controller='login',
 
                                     action='password_reset_confirmation'),
 
                                 {'email': email,
 
                                  'timestamp': timestamp,
 
                                  'password': "p@ssw0rd",
 
                                  'password_confirm': "p@ssw0rd",
 
                                  'token': token,
 
                                 })
 
        self.assertEqual(response.status, '200 OK')
 
        response.mustcontain('Invalid password reset token')
 

	
 
        # GOOD TOKEN
 

	
 
        # TODO: The token should ideally be taken from the mail sent
 
        # above, instead of being recalculated.
 

	
 
        token = UserModel().get_reset_password_token(
 
            User.get_by_username(username), timestamp, self.authentication_token())
 

	
 
        response = self.app.get(url(controller='login',
 
                                    action='password_reset_confirmation',
 
                                    email=email,
 
                                    timestamp=timestamp,
 
                                    token=token))
 
        self.assertEqual(response.status, '200 OK')
 
        response.mustcontain("You are about to set a new password for the email address %s" % email)
 

	
 
        response = self.app.post(url(controller='login',
 
                                     action='password_reset_confirmation'),
 
                                 {'email': email,
 
                                  'timestamp': timestamp,
 
                                  'password': "p@ssw0rd",
 
                                  'password_confirm': "p@ssw0rd",
 
                                  'token': token,
 
                                 })
 
        self.assertEqual(response.status, '302 Found')
 
        self.checkSessionFlash(response, 'Successfully updated password')
 

	
 
        response = response.follow()
 

	
 
    #==========================================================================
 
    # API
 
    #==========================================================================
 

	
 
    def _get_api_whitelist(self, values=None):
 
        config = {'api_access_controllers_whitelist': values or []}
 
        return config
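 
    # Whitelist entries use the 'Controller:action' form, e.g.
 
    # {'api_access_controllers_whitelist': ['ChangesetController:changeset_raw']}
 
    # grants api_key query-arg access to just that action (see tests below).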
 

	
 
    @parameterized.expand([
 
        ('none', None),
 
        ('empty_string', ''),
 
        ('fake_number', '123456'),
 
        ('proper_api_key', None)
 
    ])
 
    def test_access_not_whitelisted_page_via_api_key(self, test_name, api_key):
 
        whitelist = self._get_api_whitelist([])
 
        with mock.patch('kallithea.CONFIG', whitelist):
 
            self.assertEqual([],
 
                             whitelist['api_access_controllers_whitelist'])
 
            if test_name == 'proper_api_key':
 
                # use the first admin's built-in API key if api_key is None
 
                api_key = User.get_first_admin().api_key
 

	
 
            with fixture.anon_access(False):
 
                self.app.get(url(controller='changeset',
 
                                 action='changeset_raw',
 
                                 repo_name=HG_REPO, revision='tip', api_key=api_key),
 
                             status=403)
 

	
 
    @parameterized.expand([
 
        ('none', None, 302),
 
        ('empty_string', '', 302),
 
        ('fake_number', '123456', 302),
 
        ('fake_not_alnum', 'a-z', 302),
 
        ('fake_api_key', '0123456789abcdef0123456789ABCDEF01234567', 302),
 
        ('proper_api_key', None, 200)
 
    ])
 
    def test_access_whitelisted_page_via_api_key(self, test_name, api_key, code):
 
        whitelist = self._get_api_whitelist(['ChangesetController:changeset_raw'])
 
        with mock.patch('kallithea.CONFIG', whitelist):
 
            self.assertEqual(['ChangesetController:changeset_raw'],
 
                             whitelist['api_access_controllers_whitelist'])
 
            if test_name == 'proper_api_key':
 
                api_key = User.get_first_admin().api_key
 

	
 
            with fixture.anon_access(False):
 
                self.app.get(url(controller='changeset',
 
                                 action='changeset_raw',
 
                                 repo_name=HG_REPO, revision='tip', api_key=api_key),
 
                             status=code)
 

	
 
    def test_access_page_via_extra_api_key(self):
 
        whitelist = self._get_api_whitelist(['ChangesetController:changeset_raw'])
 
        with mock.patch('kallithea.CONFIG', whitelist):
 
            self.assertEqual(['ChangesetController:changeset_raw'],
 
                             whitelist['api_access_controllers_whitelist'])
 

	
 
            new_api_key = ApiKeyModel().create(TEST_USER_ADMIN_LOGIN, u'test')
 
            Session().commit()
 
            with fixture.anon_access(False):
 
                self.app.get(url(controller='changeset',
 
                                 action='changeset_raw',
 
                                 repo_name=HG_REPO, revision='tip', api_key=new_api_key.api_key),
 
                             status=200)
 

	
 
    def test_access_page_via_expired_api_key(self):
 
        whitelist = self._get_api_whitelist(['ChangesetController:changeset_raw'])
 
        with mock.patch('kallithea.CONFIG', whitelist):
 
            self.assertEqual(['ChangesetController:changeset_raw'],
 
                             whitelist['api_access_controllers_whitelist'])
 

	
 
            new_api_key = ApiKeyModel().create(TEST_USER_ADMIN_LOGIN, u'test')
 
            Session().commit()
 
            # patch the API key and make it expired
 
            new_api_key.expires = 0
 
            Session().add(new_api_key)
 
            Session().commit()
 
            with fixture.anon_access(False):
 
                self.app.get(url(controller='changeset',
 
                                 action='changeset_raw',
 
                                 repo_name=HG_REPO, revision='tip',
 
                                 api_key=new_api_key.api_key),
 
                             status=302)
kallithea/tests/models/test_diff_parsers.py
 
from __future__ import with_statement
 
from kallithea.tests import *
 
from kallithea.lib.diffs import DiffProcessor, NEW_FILENODE, DEL_FILENODE, \
 
    MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE, COPIED_FILENODE
 
from kallithea.tests.fixture import Fixture
 

	
 
fixture = Fixture()
 

	
 

	
 
DIFF_FIXTURES = {
 
    'hg_diff_add_single_binary_file.diff': [
 
        ('US Warszawa.jpg', 'A',
 
         {'added': 0,
 
          'deleted': 0,
 
          'binary': True,
 
          'ops': {NEW_FILENODE: 'new file 100755',
 
                  BIN_FILENODE: 'binary diff not shown'}}),
 
    ],
 
    'hg_diff_mod_single_binary_file.diff': [
 
        ('US Warszawa.jpg', 'M',
 
         {'added': 0,
 
          'deleted': 0,
 
          'binary': True,
 
          'ops': {MOD_FILENODE: 'modified file',
 
                  BIN_FILENODE: 'binary diff not shown'}}),
 
    ],
 

	
 
    'hg_diff_mod_single_file_and_rename_and_chmod.diff': [
 
        ('README', 'R',
 
         {'added': 3,
 
          'deleted': 0,
 
          'binary': False,
 
          'ops': {RENAMED_FILENODE: 'file renamed from README.rst to README',
 
                  CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}),
 
    ],
 
    'hg_diff_mod_file_and_rename.diff': [
 
        ('README.rst', 'R',
 
         {'added': 3,
 
          'deleted': 0,
 
          'binary': False,
 
          'ops': {RENAMED_FILENODE: 'file renamed from README to README.rst'}}),
 
    ],
 
    'hg_diff_del_single_binary_file.diff': [
 
        ('US Warszawa.jpg', 'D',
 
         {'added': 0,
 
          'deleted': 0,
 
          'binary': True,
 
          'ops': {DEL_FILENODE: 'deleted file',
 
                  BIN_FILENODE: 'binary diff not shown'}}),
 
    ],
 
    'hg_diff_chmod_and_mod_single_binary_file.diff': [
 
        ('gravatar.png', 'M',
 
         {'added': 0,
 
          'deleted': 0,
 
          'binary': True,
 
          'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755',
 
                  BIN_FILENODE: 'binary diff not shown'}}),
 
    ],
 
    'hg_diff_chmod.diff': [
 
        ('file', 'M',
 
         {'added': 0,
 
          'deleted': 0,
 
          'binary': True,
 
          'ops': {CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}),
 
    ],
 
    'hg_diff_rename_file.diff': [
 
        ('file_renamed', 'R',
 
         {'added': 0,
 
          'deleted': 0,
 
          'binary': True,
 
          'ops': {RENAMED_FILENODE: 'file renamed from file to file_renamed'}}),
 
    ],
 
    'hg_diff_rename_and_chmod_file.diff': [
 
        ('README', 'R',
 
         {'added': 0,
 
          'deleted': 0,
 
          'binary': True,
 
          'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755',
 
                  RENAMED_FILENODE: 'file renamed from README.rst to README'}}),
 
    ],
 
    'hg_diff_binary_and_normal.diff': [
 
        ('img/baseline-10px.png', 'A',
 
         {'added': 0,
 
          'deleted': 0,
 
          'binary': True,
 
          'ops': {NEW_FILENODE: 'new file 100644',
 
                  BIN_FILENODE: 'binary diff not shown'}}),
 
        ('img/baseline-20px.png', 'D',
 
         {'added': 0,
 
          'deleted': 0,
 
          'binary': True,
 
          'ops': {DEL_FILENODE: 'deleted file',
 
                  BIN_FILENODE: 'binary diff not shown'}}),
 
        ('index.html', 'M',
 
         {'added': 3,
 
          'deleted': 2,
 
          'binary': False,
 
          'ops': {MOD_FILENODE: 'modified file'}}),
 
        ('js/global.js', 'D',
 
         {'added': 0,
 
          'deleted': 75,
 
          'binary': False,
 
          'ops': {DEL_FILENODE: 'deleted file'}}),
 
        ('js/jquery/hashgrid.js', 'A',
 
         {'added': 340,
 
          'deleted': 0,
 
          'binary': False,
 
          'ops': {NEW_FILENODE: 'new file 100755'}}),
 
        ('less/docs.less', 'M',
 
         {'added': 34,
 
          'deleted': 0,
 
          'binary': False,
 
          'ops': {MOD_FILENODE: 'modified file'}}),
 
        ('less/scaffolding.less', 'M',
 
         {'added': 1,
 
          'deleted': 3,
 
          'binary': False,
 
          'ops': {MOD_FILENODE: 'modified file'}}),
 
        ('readme.markdown', 'M',
 
         {'added': 1,
 
          'deleted': 10,
 
          'binary': False,
 
          'ops': {MOD_FILENODE: 'modified file'}}),
 
    ],
 
    'git_diff_chmod.diff': [
 
        ('work-horus.xls', 'M',
 
         {'added': 0,
 
          'deleted': 0,
 
          'binary': True,
 
          'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}})
 
    ],
 
    'git_diff_rename_file.diff': [
 
        ('file.xls', 'R',
 
         {'added': 0,
 
          'deleted': 0,
 
          'binary': True,
 
          'ops': {RENAMED_FILENODE: 'file renamed from work-horus.xls to file.xls'}})
 
    ],
 
    'git_diff_mod_single_binary_file.diff': [
 
        ('US Warszawa.jpg', 'M',
 
         {'added': 0,
 
          'deleted': 0,
 
          'binary': True,
 
          'ops': {MOD_FILENODE: 'modified file',
 
                  BIN_FILENODE: 'binary diff not shown'}})
 
    ],
 
    'git_diff_binary_and_normal.diff': [
 
        ('img/baseline-10px.png', 'A',
 
         {'added': 0,
 
          'deleted': 0,
 
          'binary': True,
 
          'ops': {NEW_FILENODE: 'new file 100644',
 
                  BIN_FILENODE: 'binary diff not shown'}}),
 
        ('img/baseline-20px.png', 'D',
 
         {'added': 0,
 
          'deleted': 0,
 
          'binary': True,
 
          'ops': {DEL_FILENODE: 'deleted file',
 
                  BIN_FILENODE: 'binary diff not shown'}}),
 
        ('index.html', 'M',
 
         {'added': 3,
 
          'deleted': 2,
 
          'binary': False,
 
          'ops': {MOD_FILENODE: 'modified file'}}),
 
        ('js/global.js', 'D',
 
         {'added': 0,
 
          'deleted': 75,
 
          'binary': False,
 
          'ops': {DEL_FILENODE: 'deleted file'}}),
 
        ('js/jquery/hashgrid.js', 'A',
 
         {'added': 340,
 
          'deleted': 0,
 
          'binary': False,
 
          'ops': {NEW_FILENODE: 'new file 100755'}}),
 
        ('less/docs.less', 'M',
 
         {'added': 34,
 
          'deleted': 0,
 
          'binary': False,
 
          'ops': {MOD_FILENODE: 'modified file'}}),
 
        ('less/scaffolding.less', 'M',
 
         {'added': 1,
 
          'deleted': 3,
 
          'binary': False,
 
          'ops': {MOD_FILENODE: 'modified file'}}),
 
        ('readme.markdown', 'M',
 
         {'added': 1,
 
          'deleted': 10,
 
          'binary': False,
 
          'ops': {MOD_FILENODE: 'modified file'}}),
 
    ],
 
    'diff_with_diff_data.diff': [
 
        ('vcs/backends/base.py', 'M',
 
         {'added': 18,
 
          'deleted': 2,
 
          'binary': False,
 
          'ops': {MOD_FILENODE: 'modified file'}}),
 
        ('vcs/backends/git/repository.py', 'M',
 
         {'added': 46,
 
          'deleted': 15,
 
          'binary': False,
 
          'ops': {MOD_FILENODE: 'modified file'}}),
 
        ('vcs/backends/hg.py', 'M',
 
         {'added': 22,
 
          'deleted': 3,
 
          'binary': False,
 
          'ops': {MOD_FILENODE: 'modified file'}}),
 
        ('vcs/tests/test_git.py', 'M',
 
         {'added': 5,
 
          'deleted': 5,
 
          'binary': False,
 
          'ops': {MOD_FILENODE: 'modified file'}}),
 
        ('vcs/tests/test_repository.py', 'M',
 
         {'added': 174,
 
          'deleted': 2,
 
          'binary': False,
 
          'ops': {MOD_FILENODE: 'modified file'}}),
 
    ],
 
    'git_diff_modify_binary_file.diff': [
 
        ('file.name', 'M',
 
         {'added': 0,
 
          'deleted': 0,
 
          'binary': True,
 
          'ops': {MOD_FILENODE: 'modified file',
 
                  BIN_FILENODE: 'binary diff not shown'}})
 
    ],
 
    'hg_diff_copy_file.diff': [
 
        ('file2', 'M',
 
         {'added': 0,
 
          'deleted': 0,
 
          'binary': True,
 
          'ops': {COPIED_FILENODE: 'file copied from file1 to file2'}}),
 
    ],
 
    'hg_diff_copy_and_modify_file.diff': [
 
        ('file3', 'M',
 
         {'added': 1,
 
          'deleted': 0,
 
          'binary': False,
 
          'ops': {COPIED_FILENODE: 'file copied from file2 to file3',
 
                  MOD_FILENODE: 'modified file'}}),
 
    ],
 
    'hg_diff_copy_and_chmod_file.diff': [
 
        ('file4', 'M',
 
         {'added': 0,
 
          'deleted': 0,
 
          'binary': True,
 
          'ops': {COPIED_FILENODE: 'file copied from file3 to file4',
 
                  CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}}),
 
    ],
 
    'hg_diff_copy_chmod_and_edit_file.diff': [
 
        ('file5', 'M',
 
         {'added': 2,
 
          'deleted': 1,
 
          'binary': False,
 
          'ops': {COPIED_FILENODE: 'file copied from file4 to file5',
 
                  CHMOD_FILENODE: 'modified file chmod 100755 => 100644',
 
                  MOD_FILENODE: 'modified file'}}),
 
    ],
 
    'hg_diff_rename_space_cr.diff': [
 
        ('oh yes', 'R',
 
         {'added': 3,
 
          'deleted': 2,
 
          'binary': False,
 
          'ops': {RENAMED_FILENODE: 'file renamed from oh no to oh yes'}}),
 
    ],
 
}
 

	
 

	
 
class DiffLibTest(BaseTestCase):
 

	
 
    @parameterized.expand([(x,) for x in DIFF_FIXTURES])
 
    def test_diff(self, diff_fixture):
 
        diff = fixture.load_resource(diff_fixture, strip=False)
 
        diff_proc = DiffProcessor(diff)
 
        diff_proc_d = diff_proc.prepare()
 
        data = [(x['filename'], x['operation'], x['stats']) for x in diff_proc_d]
 
        expected_data = DIFF_FIXTURES[diff_fixture]
 
        self.assertListEqual(expected_data, data)
 

	
 
    def test_diff_markup(self):
 
        diff = fixture.load_resource('markuptest.diff', strip=False)
 
        diff_proc = DiffProcessor(diff)
 
        diff_proc_d = diff_proc.prepare()
 
        chunks = diff_proc_d[0]['chunks']
 
        self.assertFalse(chunks[0])
 
        #from pprint import pprint; pprint(chunks[1])
 
        lines = ['\n']
 
        for d in chunks[1]:
 
            lines.append('%(action)-7s %(new_lineno)3s %(old_lineno)3s %(line)r\n' % d)
 
        s = ''.join(lines)
 
        print s
 
        self.assertEqual(s, r'''
 
context ... ... u'@@ -51,5 +51,12 @@\n'
 
unmod    51  51 u'<u>\t</u>begin();\n'
 
unmod    52  52 u'<u>\t</u>\n'
 
add      53     u'<u>\t</u>int foo;<u class="cr"></u>\n'
 
add      54     u'<u>\t</u>int bar; <u class="cr"></u>\n'
 
add      55     u'<u>\t</u>int baz;<u>\t</u><u class="cr"></u>\n'
 
add      56     u'<u>\t</u>int space; <i></i>'
 
add      57     u'<u>\t</u>int tab;<u>\t</u>\n'
 
add      58     u'<u>\t</u>\n'
 
unmod    59  53 u' <i></i>'
 
del          54 u'<u>\t</u><del>#define MAX_STEPS (48)</del>\n'
 
add      60     u'<u>\t</u><ins><u class="cr"></u></ins>\n'
 
add      61     u'<u>\t</u>#define MAX_STEPS (64)<u class="cr"></u>\n'
 
unmod    62  55 u'\n'
 
''')
kallithea/tests/other/test_libs.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.tests.other.test_libs
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Package for testing various lib/helper functions in kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jun 9, 2011
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 
import datetime
 
import hashlib
 
import mock
 
from kallithea.tests import *
 
from kallithea.lib.utils2 import AttributeDict
 
from kallithea.model.db import Repository
 

	
 
proto = 'http'
 
TEST_URLS = [
 
    ('%s://127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
 
     '%s://127.0.0.1' % proto),
 
    ('%s://username@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
 
     '%s://127.0.0.1' % proto),
 
    ('%s://username:pass@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
 
     '%s://127.0.0.1' % proto),
 
    ('%s://127.0.0.1:8080' % proto, ['%s://' % proto, '127.0.0.1', '8080'],
 
     '%s://127.0.0.1:8080' % proto),
 
    ('%s://domain.org' % proto, ['%s://' % proto, 'domain.org'],
 
     '%s://domain.org' % proto),
 
    ('%s://user:pass@domain.org:8080' % proto, ['%s://' % proto, 'domain.org',
 
                                                '8080'],
 
     '%s://domain.org:8080' % proto),
 
]
 

	
 
proto = 'https'
 
TEST_URLS += [
 
    ('%s://127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
 
     '%s://127.0.0.1' % proto),
 
    ('%s://username@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
 
     '%s://127.0.0.1' % proto),
 
    ('%s://username:pass@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'],
 
     '%s://127.0.0.1' % proto),
 
    ('%s://127.0.0.1:8080' % proto, ['%s://' % proto, '127.0.0.1', '8080'],
 
     '%s://127.0.0.1:8080' % proto),
 
    ('%s://domain.org' % proto, ['%s://' % proto, 'domain.org'],
 
     '%s://domain.org' % proto),
 
    ('%s://user:pass@domain.org:8080' % proto, ['%s://' % proto, 'domain.org',
 
                                                '8080'],
 
     '%s://domain.org:8080' % proto),
 
]
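 
# Each triple above reads: (input URL, expected uri_filter() parts,
 
# expected credentials_filter() result); credentials are stripped, e.g.
 
# 'https://user:pass@domain.org:8080' -> 'https://domain.org:8080'.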
 

	
 

	
 
class TestLibs(BaseTestCase):
 

	
 
    @parameterized.expand(TEST_URLS)
 
    def test_uri_filter(self, test_url, expected, expected_creds):
 
        from kallithea.lib.utils2 import uri_filter
 
        self.assertEqual(uri_filter(test_url), expected)
 

	
 
    @parameterized.expand(TEST_URLS)
 
    def test_credentials_filter(self, test_url, expected, expected_creds):
 
        from kallithea.lib.utils2 import credentials_filter
 
        self.assertEqual(credentials_filter(test_url), expected_creds)
 

	
 
    @parameterized.expand([('t', True),
 
                           ('true', True),
 
                           ('y', True),
 
                           ('yes', True),
 
                           ('on', True),
 
                           ('1', True),
 
                           ('Y', True),
 
                           ('yeS', True),
 
                           ('Y', True),
 
                           ('TRUE', True),
 
                           ('T', True),
 
                           ('False', False),
 
                           ('F', False),
 
                           ('FALSE', False),
 
                           ('0', False),
 
                           ('-1', False),
 
                           ('', False)
 
    ])
 
    def test_str2bool(self, str_bool, expected):
 
        from kallithea.lib.utils2 import str2bool
 
        self.assertEqual(str2bool(str_bool), expected)
 

	
 
    def test_mention_extractor(self):
 
        from kallithea.lib.utils2 import extract_mentioned_users
 
        sample = (
 
            "@first hi there @world here's my email username@email.com "
 
            "@lukaszb check @one_more22 it pls @ ttwelve @D[] @one@two@three "
 
            "@UPPER    @cAmEL @2one_more22 @john please see this http://org.pl "
 
            "@marian.user just do it @marco-polo and next extract @marco_polo "
 
            "user.dot  hej ! not-needed maril@domain.org"
 
        )
 

	
 
        s = sorted([
 
            '2one_more22', 'first', 'lukaszb', 'one', 'one_more22', 'UPPER', 'cAmEL', 'john',
 
            'marian.user', 'marco-polo', 'marco_polo', 'world'], key=lambda k: k.lower())
 
        self.assertEqual(s, extract_mentioned_users(sample))
 

	
 
    @parameterized.expand([
 
        (dict(), u'just now'),
 
        (dict(seconds= -1), u'1 second ago'),
 
        (dict(seconds= -60 * 2), u'2 minutes ago'),
 
        (dict(hours= -1), u'1 hour ago'),
 
        (dict(hours= -24), u'1 day ago'),
 
        (dict(hours= -24 * 5), u'5 days ago'),
 
        (dict(months= -1), u'1 month ago'),
 
        (dict(months= -1, days= -2), u'1 month and 2 days ago'),
 
        (dict(months= -1, days= -20), u'1 month and 19 days ago'),
 
        (dict(years= -1, months= -1), u'1 year and 1 month ago'),
 
        (dict(years= -1, months= -10), u'1 year and 10 months ago'),
 
        (dict(years= -2, months= -4), u'2 years and 4 months ago'),
 
        (dict(years= -2, months= -11), u'2 years and 11 months ago'),
 
        (dict(years= -3, months= -2), u'3 years and 2 months ago'),
 
    ])
 
    def test_age(self, age_args, expected):
 
        from kallithea.lib.utils2 import age
 
        from dateutil import relativedelta
 
        n = datetime.datetime(year=2012, month=5, day=17)
 
        delt = relativedelta.relativedelta
 
        self.assertEqual(age(n + delt(**age_args), now=n), expected)
 

	
 
    @parameterized.expand([
 
        (dict(), u'just now'),
 
        (dict(seconds= -1), u'1 second ago'),
 
        (dict(seconds= -60 * 2), u'2 minutes ago'),
 
        (dict(hours= -1), u'1 hour ago'),
 
        (dict(hours= -24), u'1 day ago'),
 
        (dict(hours= -24 * 5), u'5 days ago'),
 
        (dict(months= -1), u'1 month ago'),
 
        (dict(months= -1, days= -2), u'1 month ago'),
 
        (dict(months= -1, days= -20), u'1 month ago'),
 
        (dict(years= -1, months= -1), u'13 months ago'),
 
        (dict(years= -1, months= -10), u'22 months ago'),
 
        (dict(years= -2, months= -4), u'2 years ago'),
 
        (dict(years= -2, months= -11), u'3 years ago'),
 
        (dict(years= -3, months= -2), u'3 years ago'),
 
        (dict(years= -4, months= -8), u'5 years ago'),
 
    ])
 
    def test_age_short(self, age_args, expected):
 
        from kallithea.lib.utils2 import age
 
        from dateutil import relativedelta
 
        n = datetime.datetime(year=2012, month=5, day=17)
 
        delt = relativedelta.relativedelta
 
        self.assertEqual(age(n + delt(**age_args), show_short_version=True, now=n), expected)
 

	
 
    @parameterized.expand([
 
        (dict(), u'just now'),
 
        (dict(seconds=1), u'in 1 second'),
 
        (dict(seconds=60 * 2), u'in 2 minutes'),
 
        (dict(hours=1), u'in 1 hour'),
 
        (dict(hours=24), u'in 1 day'),
 
        (dict(hours=24 * 5), u'in 5 days'),
 
        (dict(months=1), u'in 1 month'),
 
        (dict(months=1, days=1), u'in 1 month and 1 day'),
 
        (dict(years=1, months=1), u'in 1 year and 1 month')
 
    ])
 
    def test_age_in_future(self, age_args, expected):
 
        from kallithea.lib.utils2 import age
 
        from dateutil import relativedelta
 
        n = datetime.datetime(year=2012, month=5, day=17)
 
        delt = relativedelta.relativedelta
 
        self.assertEqual(age(n + delt(**age_args), now=n), expected)
 

	
 
    def test_tag_extractor(self):
 
        sample = (
 
            "hello pta[tag] gog [[]] [[] sda ero[or]d [me =>>< sa]"
 
            "[requires] [stale] [see<>=>] [see => http://url.com]"
 
            "[requires => url] [lang => python] [just a tag]"
 
            "[,d] [ => ULR ] [obsolete] [desc]]"
 
        )
 
        from kallithea.lib.helpers import urlify_text
 
        res = urlify_text(sample, stylize=True)
 
        self.assertIn('<div class="metatag" tag="tag">tag</div>', res)
 
        self.assertIn('<div class="metatag" tag="obsolete">obsolete</div>', res)
 
        self.assertIn('<div class="metatag" tag="stale">stale</div>', res)
 
        self.assertIn('<div class="metatag" tag="lang">python</div>', res)
 
        self.assertIn('<div class="metatag" tag="requires">requires =&gt; <a href="/url">url</a></div>', res)
 
        self.assertIn('<div class="metatag" tag="tag">tag</div>', res)
 

	
 
    def test_alternative_gravatar(self):
 
        from kallithea.lib.helpers import gravatar_url
 
        _md5 = lambda s: hashlib.md5(s).hexdigest()
 

	
 
        #mock pylons.url
 
        class fake_url(object):
 
            @classmethod
 
            def current(cls, *args, **kwargs):
 
                return 'https://server.com'
 

	
 
        #mock pylons.tmpl_context
 
        def fake_tmpl_context(_url):
 
            _c = AttributeDict()
 
            _c.visual = AttributeDict()
 
            _c.visual.use_gravatar = True
 
            _c.visual.gravatar_url = _url
 

	
 
            return _c
 

	
 

	
 
        with mock.patch('pylons.url', fake_url):
 
            fake = fake_tmpl_context(_url='http://test.com/{email}')
 
            with mock.patch('pylons.tmpl_context', fake):
 
                from pylons import url
 
                assert url.current() == 'https://server.com'
 
                grav = gravatar_url(email_address='test@foo.com', size=24)
 
                assert grav == 'http://test.com/test@foo.com'
 

	
 
            fake = fake_tmpl_context(_url='http://test.com/{email}')
 
            with mock.patch('pylons.tmpl_context', fake):
 
                grav = gravatar_url(email_address='test@foo.com', size=24)
 
                assert grav == 'http://test.com/test@foo.com'
 

	
 
            fake = fake_tmpl_context(_url='http://test.com/{md5email}')
 
            with mock.patch('pylons.tmpl_context', fake):
 
                em = 'test@foo.com'
 
                grav = gravatar_url(email_address=em, size=24)
 
                assert grav == 'http://test.com/%s' % (_md5(em))
 

	
 
            fake = fake_tmpl_context(_url='http://test.com/{md5email}/{size}')
 
            with mock.patch('pylons.tmpl_context', fake):
 
                em = 'test@foo.com'
 
                grav = gravatar_url(email_address=em, size=24)
 
                assert grav == 'http://test.com/%s/%s' % (_md5(em), 24)
 

	
 
            fake = fake_tmpl_context(_url='{scheme}://{netloc}/{md5email}/{size}')
 
            with mock.patch('pylons.tmpl_context', fake):
 
                em = 'test@foo.com'
 
                grav = gravatar_url(email_address=em, size=24)
 
                assert grav == 'https://server.com/%s/%s' % (_md5(em), 24)
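                # The _url templates above share one placeholder scheme; a
                # hypothetical sketch of the substitution gravatar_url is
                # expected to perform, mirroring the asserts above:
                #
                #   import urlparse
                #   parsed = urlparse.urlparse(url.current())
                #   tmpl.format(email=em, md5email=_md5(em), size=24,
                #               scheme=parsed.scheme, netloc=parsed.netloc)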
 

	
 
    @parameterized.expand([
 
        (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '', 'http://vps1:8000/group/repo1'),
 
        (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'username'}, '', 'http://username@vps1:8000/group/repo1'),
 
        (Repository.DEFAULT_CLONE_URI, 'group/repo1', {}, '/prefix', 'http://vps1:8000/prefix/group/repo1'),
 
        (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/prefix', 'http://user@vps1:8000/prefix/group/repo1'),
 
        (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'username'}, '/prefix', 'http://username@vps1:8000/prefix/group/repo1'),
 
        (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'user'}, '/prefix/', 'http://user@vps1:8000/prefix/group/repo1'),
 
        (Repository.DEFAULT_CLONE_URI, 'group/repo1', {'user': 'username'}, '/prefix/', 'http://username@vps1:8000/prefix/group/repo1'),
 
        ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {}, '', 'http://vps1:8000/_23'),
 
        ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'username'}, '', 'http://username@vps1:8000/_23'),
 
        ('http://{user}@{netloc}/_{repoid}', 'group/repo1', {'user': 'username'}, '', 'http://username@vps1:8000/_23'),
 
        ('http://{netloc}/_{repoid}', 'group/repo1', {'user': 'username'}, '', 'http://vps1:8000/_23'),
 
        ('https://{user}@proxy1.server.com/{repo}', 'group/repo1', {'user': 'username'}, '', 'https://username@proxy1.server.com/group/repo1'),
 
        ('https://{user}@proxy1.server.com/{repo}', 'group/repo1', {}, '', 'https://proxy1.server.com/group/repo1'),
 
        ('https://proxy1.server.com/{user}/{repo}', 'group/repo1', {'user': 'username'}, '', 'https://proxy1.server.com/username/group/repo1'),
 
    ])
 
    def test_clone_url_generator(self, tmpl, repo_name, overrides, prefix, expected):
 
        from kallithea.lib.utils2 import get_clone_url
 
        clone_url = get_clone_url(uri_tmpl=tmpl, qualified_home_url='http://vps1:8000'+prefix,
 
                                  repo_name=repo_name, repo_id=23, **overrides)
 
        self.assertEqual(clone_url, expected)
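        # A minimal sketch of the substitution these cases exercise, assuming
        # get_clone_url simply str.format()s the template (the real
        # implementation lives in kallithea.lib.utils2):
        #
        #   tmpl = '{scheme}://{user}@{netloc}/_{repoid}'
        #   tmpl.format(scheme='http', user='username',
        #               netloc='vps1:8000', repoid=23)
        #   # -> 'http://username@vps1:8000/_23'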
 

	
 
    def _quick_url(self, text, tmpl="""<a class="revision-link" href="%s">%s</a>""", url_=None):
 
        """
 
        Changes `some text url[foo]` => `some text <a href="/">foo</a>`
 

	
 
        :param text:
 
        """
 
        import re
 
        # quickly change expected url[] into a link
 
        URL_PAT = re.compile(r'(?:url\[)(.+?)(?:\])')
 

	
 
        def url_func(match_obj):
 
            _url = match_obj.groups()[0]
 
            return tmpl % (url_ or '/some-url', _url)
 
        return URL_PAT.sub(url_func, text)
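        # For example, with the defaults above:
        #   self._quick_url('from rev url[000000000000]')
        #   # -> 'from rev <a class="revision-link" href="/some-url">000000000000</a>'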
 

	
 
    @parameterized.expand([
 
      ("",
 
       ""),
 
      ("git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68",
 
       "git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68"),
 
      ("from rev 000000000000",
 
       "from rev url[000000000000]"),
 
      ("from rev 000000000000123123 also rev 000000000000",
 
       "from rev url[000000000000123123] also rev url[000000000000]"),
 
      ("this should-000 00",
 
       "this should-000 00"),
 
      ("longtextffffffffff rev 123123123123",
 
       "longtextffffffffff rev url[123123123123]"),
 
      ("rev ffffffffffffffffffffffffffffffffffffffffffffffffff",
 
       "rev ffffffffffffffffffffffffffffffffffffffffffffffffff"),
 
      ("ffffffffffff some text traalaa",
 
       "url[ffffffffffff] some text traalaa"),
 
       ("""Multi line
 
       123123123123
 
       some text 123123123123
 
       sometimes !
 
       """,
 
       """Multi line
 
       url[123123123123]
 
       some text url[123123123123]
 
       sometimes !
 
       """)
 
    ])
 
    def test_urlify_changesets(self, sample, expected):
 
        def fake_url(self, *args, **kwargs):
 
            return '/some-url'
 

	
 
        expected = self._quick_url(expected)
 

	
 
        with mock.patch('pylons.url', fake_url):
 
            from kallithea.lib.helpers import urlify_changesets
 
            self.assertEqual(urlify_changesets(sample, 'repo_name'), expected)
 

	
 
    @parameterized.expand([
 
      ("",
 
       "",
 
       ""),
 
      ("https://svn.apache.org/repos",
 
       "url[https://svn.apache.org/repos]",
 
       "https://svn.apache.org/repos"),
 
      ("http://svn.apache.org/repos",
 
       "url[http://svn.apache.org/repos]",
 
       "http://svn.apache.org/repos"),
 
      ("from rev a also rev http://google.com",
 
       "from rev a also rev url[http://google.com]",
 
       "http://google.com"),
 
       ("""Multi line
 
       https://foo.bar.com
 
       some text lalala""",
 
       """Multi line
 
       url[https://foo.bar.com]
 
       some text lalala""",
 
       "https://foo.bar.com")
 
    ])
 
    def test_urlify_text(self, sample, expected, url_):
 
        from kallithea.lib.helpers import urlify_text
 
        expected = self._quick_url(expected,
 
                                   tmpl="""<a href="%s">%s</a>""", url_=url_)
 
        self.assertEqual(urlify_text(sample), expected)
 

	
 
    @parameterized.expand([
 
      ("", None),
 
      ("/_2", '2'),
 
      ("_2", '2'),
 
      ("/_2/", '2'),
 
      ("_2/", '2'),
 

	
 
      ("/_21", '21'),
 
      ("_21", '21'),
 
      ("/_21/", '21'),
 
      ("_21/", '21'),
 

	
 
      ("/_21/foobar", '21'),
 
      ("_21/121", '21'),
 
      ("/_21/_12", '21'),
 
      ("_21/prefix/foo", '21'),
 
    ])
 
    def test_get_repo_by_id(self, test, expected):
 
        from kallithea.lib.utils import _extract_id_from_repo_name
 
        _test = _extract_id_from_repo_name(test)
 
        self.assertEqual(_test, expected, msg='url:%s, got:`%s` expected: `%s`'
 
                                              % (test, _test, expected))
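        # The cases above pin down the rule: an optional leading '/', then
        # '_' followed by digits, yields the id; a sketch of the presumed
        # pattern (the real one lives in kallithea.lib.utils):
        #
        #   import re
        #   re.match(r'^/?_(\d+)', '/_21/foobar').group(1)  # -> '21'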
kallithea/tests/vcs/test_archives.py
 
from __future__ import with_statement
 

	
 
import os
 
import tarfile
 
import zipfile
 
import datetime
 
import tempfile
 
import StringIO
 
from kallithea.tests.vcs.base import _BackendTestMixin
 
from kallithea.tests.vcs.conf import SCM_TESTS
 
from kallithea.lib.vcs.exceptions import VCSError
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.lib.vcs.utils.compat import unittest
 

	
 

	
 
class ArchivesTestCaseMixin(_BackendTestMixin):
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        start_date = datetime.datetime(2010, 1, 1, 20)
 
        for x in xrange(5):
 
            yield {
 
                'message': 'Commit %d' % x,
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': start_date + datetime.timedelta(hours=12 * x),
 
                'added': [
 
                    FileNode('%d/file_%d.txt' % (x, x),
 
                        content='Foobar %d' % x),
 
                ],
 
            }
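        # _BackendTestMixin consumes these dicts when building the test repo;
        # each corresponds roughly to (a sketch, assuming the in-memory
        # commit API used elsewhere in these tests):
        #
        #   imc.add(FileNode('0/file_0.txt', content='Foobar 0'))
        #   imc.commit(message=u'Commit 0',
        #              author=u'Joe Doe <joe.doe@example.com>',
        #              date=datetime.datetime(2010, 1, 1, 20))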
 

	
 
    def test_archive_zip(self):
 
        path = tempfile.mkstemp()[1]
 
        with open(path, 'wb') as f:
 
            self.tip.fill_archive(stream=f, kind='zip', prefix='repo')
 
        out = zipfile.ZipFile(path)
 

	
 
        for x in xrange(5):
 
            node_path = '%d/file_%d.txt' % (x, x)
 
            decompressed = StringIO.StringIO()
 
            decompressed.write(out.read('repo/' + node_path))
 
            self.assertEqual(
 
                decompressed.getvalue(),
 
                self.tip.get_node(node_path).content)
 

	
 
    def test_archive_tgz(self):
 
        path = tempfile.mkstemp()[1]
 
        with open(path, 'wb') as f:
 
            self.tip.fill_archive(stream=f, kind='tgz', prefix='repo')
 
        outdir = tempfile.mkdtemp()
 

	
 
        outfile = tarfile.open(path, 'r|gz')
 
        outfile.extractall(outdir)
 

	
 
        for x in xrange(5):
 
            node_path = '%d/file_%d.txt' % (x, x)
 
            self.assertEqual(
 
                open(os.path.join(outdir, 'repo/' + node_path)).read(),
 
                self.tip.get_node(node_path).content)
 

	
 
    def test_archive_tbz2(self):
 
        path = tempfile.mkstemp()[1]
 
        with open(path, 'w+b') as f:
 
            self.tip.fill_archive(stream=f, kind='tbz2', prefix='repo')
 
        outdir = tempfile.mkdtemp()
 

	
 
        outfile = tarfile.open(path, 'r|bz2')
 
        outfile.extractall(outdir)
 

	
 
        for x in xrange(5):
 
            node_path = '%d/file_%d.txt' % (x, x)
 
            self.assertEqual(
 
                open(os.path.join(outdir, 'repo/' + node_path)).read(),
 
                self.tip.get_node(node_path).content)
 

	
 
    def test_archive_default_stream(self):
 
        tmppath = tempfile.mkstemp()[1]
 
        with open(tmppath, 'w') as stream:
 
            self.tip.fill_archive(stream=stream)
 
        mystream = StringIO.StringIO()
 
        self.tip.fill_archive(stream=mystream)
 
        mystream.seek(0)
 
        with open(tmppath, 'r') as f:
 
            self.assertEqual(f.read(), mystream.read())
 

	
 
    def test_archive_wrong_kind(self):
 
        with self.assertRaises(VCSError):
 
            self.tip.fill_archive(kind='wrong kind')
 

	
 
    def test_archive_empty_prefix(self):
 
        with self.assertRaises(VCSError):
 
            self.tip.fill_archive(prefix='')
 

	
 
    def test_archive_prefix_with_leading_slash(self):
 
        with self.assertRaises(VCSError):
 
            self.tip.fill_archive(prefix='/any')
 

	
 
# For each backend create test case class
 
for alias in SCM_TESTS:
 
    attrs = {
 
        'backend_alias': alias,
 
    }
 
    cls_name = ''.join(('%s archive test' % alias).title().split())
 
    bases = (ArchivesTestCaseMixin, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
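# For example, assuming 'git' is among SCM_TESTS, the loop above defines a
# class equivalent to:
#
#     class GitArchiveTest(ArchivesTestCaseMixin, unittest.TestCase):
#         backend_alias = 'git'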
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_branches.py
 
from __future__ import with_statement
 

	
 
import datetime
 
from kallithea.lib import vcs
 
from kallithea.lib.vcs.utils.compat import unittest
 
from kallithea.lib.vcs.nodes import FileNode
 

	
 
from kallithea.tests.vcs.base import _BackendTestMixin
 
from kallithea.tests.vcs.conf import SCM_TESTS
 

	
 

	
 
class BranchesTestCaseMixin(_BackendTestMixin):
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        commits = [
 
            {
 
                'message': 'Initial commit',
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 20),
 
                'added': [
 
                    FileNode('foobar', content='Foobar'),
 
                    FileNode('foobar2', content='Foobar II'),
 
                    FileNode('foo/bar/baz', content='baz here!'),
 
                ],
 
            },
 
            {
 
                'message': 'Changes...',
 
                'author': 'Jane Doe <jane.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 21),
 
                'added': [
 
                    FileNode('some/new.txt', content='news...'),
 
                ],
 
                'changed': [
 
                    FileNode('foobar', 'Foobar I'),
 
                ],
 
                'removed': [],
 
            },
 
        ]
 
        return commits
 

	
 
    def test_simple(self):
 
        tip = self.repo.get_changeset()
 
        self.assertEqual(tip.date, datetime.datetime(2010, 1, 1, 21))
 

	
 
    def test_new_branch(self):
 
        # This check must not be removed to ensure the 'branches' LazyProperty
 
        # gets hit *before* the new 'foobar' branch is created:
 
        self.assertFalse('foobar' in self.repo.branches)
 
        self.imc.add(vcs.nodes.FileNode('docs/index.txt',
 
            content='Documentation\n'))
 
        foobar_tip = self.imc.commit(
 
            message=u'New branch: foobar',
 
            author=u'joe',
 
            branch='foobar',
 
        )
 
        self.assertTrue('foobar' in self.repo.branches)
 
        self.assertEqual(foobar_tip.branch, 'foobar')
 

	
 
    def test_new_head(self):
 
        tip = self.repo.get_changeset()
 
        self.imc.add(vcs.nodes.FileNode('docs/index.txt',
 
            content='Documentation\n'))
 
        foobar_tip = self.imc.commit(
 
            message=u'New branch: foobar',
 
            author=u'joe',
 
            branch='foobar',
 
            parents=[tip],
 
        )
 
        self.imc.change(vcs.nodes.FileNode('docs/index.txt',
 
            content='Documentation\nand more...\n'))
 
        newtip = self.imc.commit(
 
            message=u'At default branch',
 
            author=u'joe',
 
            branch=foobar_tip.branch,
 
            parents=[foobar_tip],
 
        )
 

	
 
        newest_tip = self.imc.commit(
 
            message=u'Merged with %s' % foobar_tip.raw_id,
 
            author=u'joe',
 
            branch=self.backend_class.DEFAULT_BRANCH_NAME,
 
            parents=[newtip, foobar_tip],
 
        )
 

	
 
        self.assertEqual(newest_tip.branch,
 
            self.backend_class.DEFAULT_BRANCH_NAME)
 

	
 
    def test_branch_with_slash_in_name(self):
 
        self.imc.add(vcs.nodes.FileNode('extrafile', content='Some data\n'))
 
        self.imc.commit(u'Branch with a slash!', author=u'joe',
 
            branch='issue/123')
 
        self.assertTrue('issue/123' in self.repo.branches)
 

	
 
    def test_branch_with_slash_in_name_and_similar_without(self):
 
        self.imc.add(vcs.nodes.FileNode('extrafile', content='Some data\n'))
 
        self.imc.commit(u'Branch with a slash!', author=u'joe',
 
            branch='issue/123')
 
        self.imc.add(vcs.nodes.FileNode('extrafile II', content='Some data\n'))
 
        self.imc.commit(u'Branch without a slash...', author=u'joe',
 
            branch='123')
 
        self.assertIn('issue/123', self.repo.branches)
 
        self.assertIn('123', self.repo.branches)
 

	
 

	
 
# For each backend create test case class
 
for alias in SCM_TESTS:
 
    attrs = {
 
        'backend_alias': alias,
 
    }
 
    cls_name = ''.join(('%s branches test' % alias).title().split())
 
    bases = (BranchesTestCaseMixin, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
 

	
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_changesets.py
 
# encoding: utf8
 
from __future__ import with_statement
 

	
 
import time
 
import datetime
 
from kallithea.lib import vcs
 
from kallithea.tests.vcs.base import _BackendTestMixin
 
from kallithea.tests.vcs.conf import SCM_TESTS
 

	
 
from kallithea.lib.vcs.backends.base import BaseChangeset
 
from kallithea.lib.vcs.nodes import (
 
    FileNode, AddedFileNodesGenerator,
 
    ChangedFileNodesGenerator, RemovedFileNodesGenerator
 
)
 
from kallithea.lib.vcs.exceptions import (
 
    BranchDoesNotExistError, ChangesetDoesNotExistError,
 
    RepositoryError, EmptyRepositoryError
 
)
 
from kallithea.lib.vcs.utils.compat import unittest
 
from kallithea.tests.vcs.conf import get_new_dir
 

	
 

	
 
class TestBaseChangeset(unittest.TestCase):
 

	
 
    def test_as_dict(self):
 
        changeset = BaseChangeset()
 
        changeset.id = 'ID'
 
        changeset.raw_id = 'RAW_ID'
 
        changeset.short_id = 'SHORT_ID'
 
        changeset.revision = 1009
 
        changeset.date = datetime.datetime(2011, 1, 30, 1, 45)
 
        changeset.message = 'Message of a commit'
 
        changeset.author = 'Joe Doe <joe.doe@example.com>'
 
        changeset.added = [FileNode('foo/bar/baz'), FileNode('foobar')]
 
        changeset.changed = []
 
        changeset.removed = []
 
        self.assertEqual(changeset.as_dict(), {
 
            'id': 'ID',
 
            'raw_id': 'RAW_ID',
 
            'short_id': 'SHORT_ID',
 
            'revision': 1009,
 
            'date': datetime.datetime(2011, 1, 30, 1, 45),
 
            'message': 'Message of a commit',
 
            'author': {
 
                'name': 'Joe Doe',
 
                'email': 'joe.doe@example.com',
 
            },
 
            'added': ['foo/bar/baz', 'foobar'],
 
            'changed': [],
 
            'removed': [],
 
        })
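        # The author string is split into a name/email dict; a sketch of the
        # parsing this asserts (the real helper lives in the vcs utils):
        #
        #   import re
        #   m = re.match(r'(.*?)\s*<(.*)>', 'Joe Doe <joe.doe@example.com>')
        #   m.groups()  # -> ('Joe Doe', 'joe.doe@example.com')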
 

	
 
class _ChangesetsWithCommitsTestCaseMixin(_BackendTestMixin):
 
    recreate_repo_per_test = True
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        start_date = datetime.datetime(2010, 1, 1, 20)
 
        for x in xrange(5):
 
            yield {
 
                'message': 'Commit %d' % x,
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': start_date + datetime.timedelta(hours=12 * x),
 
                'added': [
 
                    FileNode('file_%d.txt' % x, content='Foobar %d' % x),
 
                ],
 
            }
 

	
 
    def test_new_branch(self):
 
        self.imc.add(vcs.nodes.FileNode('docs/index.txt',
 
            content='Documentation\n'))
 
        foobar_tip = self.imc.commit(
 
            message=u'New branch: foobar',
 
            author=u'joe',
 
            branch='foobar',
 
        )
 
        self.assertTrue('foobar' in self.repo.branches)
 
        self.assertEqual(foobar_tip.branch, 'foobar')
 
        # 'foobar' should be the only branch that contains the new commit
 
        self.assertNotEqual(*self.repo.branches.values())
 

	
 
    def test_new_head_in_default_branch(self):
 
        tip = self.repo.get_changeset()
 
        self.imc.add(vcs.nodes.FileNode('docs/index.txt',
 
            content='Documentation\n'))
 
        foobar_tip = self.imc.commit(
 
            message=u'New branch: foobar',
 
            author=u'joe',
 
            branch='foobar',
 
            parents=[tip],
 
        )
 
        self.imc.change(vcs.nodes.FileNode('docs/index.txt',
 
            content='Documentation\nand more...\n'))
 
        newtip = self.imc.commit(
 
            message=u'At default branch',
 
            author=u'joe',
 
            branch=foobar_tip.branch,
 
            parents=[foobar_tip],
 
        )
 

	
 
        newest_tip = self.imc.commit(
 
            message=u'Merged with %s' % foobar_tip.raw_id,
 
            author=u'joe',
 
            branch=self.backend_class.DEFAULT_BRANCH_NAME,
 
            parents=[newtip, foobar_tip],
 
        )
 

	
 
        self.assertEqual(newest_tip.branch,
 
            self.backend_class.DEFAULT_BRANCH_NAME)
 

	
 
    def test_get_changesets_respects_branch_name(self):
 
        tip = self.repo.get_changeset()
 
        self.imc.add(vcs.nodes.FileNode('docs/index.txt',
 
            content='Documentation\n'))
 
        doc_changeset = self.imc.commit(
 
            message=u'New branch: docs',
 
            author=u'joe',
 
            branch='docs',
 
        )
 
        self.imc.add(vcs.nodes.FileNode('newfile', content=''))
 
        self.imc.commit(
 
            message=u'Back in default branch',
 
            author=u'joe',
 
            parents=[tip],
 
        )
 
        default_branch_changesets = self.repo.get_changesets(
 
            branch_name=self.repo.DEFAULT_BRANCH_NAME)
 
        self.assertNotIn(doc_changeset, default_branch_changesets)
 

	
 
    def test_get_changeset_by_branch(self):
 
        for branch, sha in self.repo.branches.iteritems():
 
            self.assertEqual(sha, self.repo.get_changeset(branch).raw_id)
 

	
 
    def test_get_changeset_by_tag(self):
 
        for tag, sha in self.repo.tags.iteritems():
 
            self.assertEqual(sha, self.repo.get_changeset(tag).raw_id)
 

	
 
    def test_get_changeset_parents(self):
 
        for test_rev in [1, 2, 3]:
 
            sha = self.repo.get_changeset(test_rev-1)
 
            self.assertEqual([sha], self.repo.get_changeset(test_rev).parents)
 

	
 
    def test_get_changeset_children(self):
 
        for test_rev in [1, 2, 3]:
 
            sha = self.repo.get_changeset(test_rev+1)
 
            self.assertEqual([sha], self.repo.get_changeset(test_rev).children)
 

	
 

	
 
class _ChangesetsTestCaseMixin(_BackendTestMixin):
 
    recreate_repo_per_test = False
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        start_date = datetime.datetime(2010, 1, 1, 20)
 
        for x in xrange(5):
 
            yield {
 
                'message': u'Commit %d' % x,
 
                'author': u'Joe Doe <joe.doe@example.com>',
 
                'date': start_date + datetime.timedelta(hours=12 * x),
 
                'added': [
 
                    FileNode('file_%d.txt' % x, content='Foobar %d' % x),
 
                ],
 
            }
 

	
 
    def test_simple(self):
 
        tip = self.repo.get_changeset()
 
        self.assertEqual(tip.date, datetime.datetime(2010, 1, 3, 20))
 

	
 
    def test_get_changesets_is_ordered_by_date(self):
 
        changesets = list(self.repo.get_changesets())
 
        ordered_by_date = sorted(changesets,
 
            key=lambda cs: cs.date)
 
        self.assertItemsEqual(changesets, ordered_by_date)
 

	
 
    def test_get_changesets_respects_start(self):
 
        second_id = self.repo.revisions[1]
 
        changesets = list(self.repo.get_changesets(start=second_id))
 
        self.assertEqual(len(changesets), 4)
 

	
 
    def test_get_changesets_numerical_id_respects_start(self):
 
        second_id = 1
 
        changesets = list(self.repo.get_changesets(start=second_id))
 
        self.assertEqual(len(changesets), 4)
 

	
 
    def test_get_changesets_includes_start_changeset(self):
 
        second_id = self.repo.revisions[1]
 
        changesets = list(self.repo.get_changesets(start=second_id))
 
        self.assertEqual(changesets[0].raw_id, second_id)
 

	
 
    def test_get_changesets_respects_end(self):
 
        second_id = self.repo.revisions[1]
 
        changesets = list(self.repo.get_changesets(end=second_id))
 
        self.assertEqual(changesets[-1].raw_id, second_id)
 
        self.assertEqual(len(changesets), 2)
 

	
 
    def test_get_changesets_numerical_id_respects_end(self):
 
        second_id = 1
 
        changesets = list(self.repo.get_changesets(end=second_id))
 
        self.assertEqual(changesets.index(changesets[-1]), second_id)
 
        self.assertEqual(len(changesets), 2)
 

	
 
    def test_get_changesets_respects_both_start_and_end(self):
 
        second_id = self.repo.revisions[1]
 
        third_id = self.repo.revisions[2]
 
        changesets = list(self.repo.get_changesets(start=second_id,
 
            end=third_id))
 
        self.assertEqual(len(changesets), 2)
 

	
 
    def test_get_changesets_numerical_id_respects_both_start_and_end(self):
 
        changesets = list(self.repo.get_changesets(start=2, end=3))
 
        self.assertEqual(len(changesets), 2)
 

	
 
    def test_get_changesets_on_empty_repo_raises_EmptyRepository_error(self):
 
        Backend = self.get_backend()
 
        repo_path = get_new_dir(str(time.time()))
 
        repo = Backend(repo_path, create=True)
 

	
 
        with self.assertRaises(EmptyRepositoryError):
 
            list(repo.get_changesets(start='foobar'))
 

	
 
    def test_get_changesets_includes_end_changeset(self):
 
        second_id = self.repo.revisions[1]
 
        changesets = list(self.repo.get_changesets(end=second_id))
 
        self.assertEqual(changesets[-1].raw_id, second_id)
 

	
 
    def test_get_changesets_respects_start_date(self):
 
        start_date = datetime.datetime(2010, 2, 1)
 
        for cs in self.repo.get_changesets(start_date=start_date):
 
            self.assertGreaterEqual(cs.date, start_date)
 

	
 
    def test_get_changesets_respects_end_date(self):
 
        start_date = datetime.datetime(2010, 1, 1)
 
        end_date = datetime.datetime(2010, 2, 1)
 
        for cs in self.repo.get_changesets(start_date=start_date,
 
                                           end_date=end_date):
 
            self.assertGreaterEqual(cs.date, start_date)
 
            self.assertLessEqual(cs.date, end_date)
 

	
 
    def test_get_changesets_respects_start_date_and_end_date(self):
 
        end_date = datetime.datetime(2010, 2, 1)
 
        for cs in self.repo.get_changesets(end_date=end_date):
 
            self.assertLessEqual(cs.date, end_date)
 

	
 
    def test_get_changesets_respects_reverse(self):
 
        changesets_id_list = [cs.raw_id for cs in
 
            self.repo.get_changesets(reverse=True)]
 
        self.assertItemsEqual(changesets_id_list, reversed(self.repo.revisions))
 

	
 
    def test_get_filenodes_generator(self):
 
        tip = self.repo.get_changeset()
 
        filepaths = [node.path for node in tip.get_filenodes_generator()]
 
        self.assertItemsEqual(filepaths, ['file_%d.txt' % x for x in xrange(5)])
 

	
 
    def test_size(self):
 
        tip = self.repo.get_changeset()
 
        size = 5 * len('Foobar N') # Size of 5 files
 
        self.assertEqual(tip.size, size)
 

	
 
    def test_author(self):
 
        tip = self.repo.get_changeset()
 
        self.assertEqual(tip.author, u'Joe Doe <joe.doe@example.com>')
 

	
 
    def test_author_name(self):
 
        tip = self.repo.get_changeset()
 
        self.assertEqual(tip.author_name, u'Joe Doe')
 

	
 
    def test_author_email(self):
 
        tip = self.repo.get_changeset()
 
        self.assertEqual(tip.author_email, u'joe.doe@example.com')
 

	
 
    def test_get_changesets_raise_changesetdoesnotexist_for_wrong_start(self):
 
        with self.assertRaises(ChangesetDoesNotExistError):
 
            list(self.repo.get_changesets(start='foobar'))
 

	
 
    def test_get_changesets_raise_changesetdoesnotexist_for_wrong_end(self):
 
        with self.assertRaises(ChangesetDoesNotExistError):
 
            list(self.repo.get_changesets(end='foobar'))
 

	
 
    def test_get_changesets_raise_branchdoesnotexist_for_wrong_branch_name(self):
 
        with self.assertRaises(BranchDoesNotExistError):
 
            list(self.repo.get_changesets(branch_name='foobar'))
 

	
 
    def test_get_changesets_raise_repositoryerror_for_wrong_start_end(self):
 
        start = self.repo.revisions[-1]
 
        end = self.repo.revisions[0]
 
        with self.assertRaises(RepositoryError):
 
            list(self.repo.get_changesets(start=start, end=end))
 

	
 
    def test_get_changesets_numerical_id_reversed(self):
 
        with self.assertRaises(RepositoryError):
 
            [x for x in self.repo.get_changesets(start=3, end=2)]
 

	
 
    def test_get_changesets_numerical_id_respects_both_start_and_end_last(self):
 
        with self.assertRaises(RepositoryError):
 
            last = len(self.repo.revisions)
 
            list(self.repo.get_changesets(start=last-1, end=last-2))
 

	
 
    def test_get_changesets_numerical_id_last_zero_error(self):
 
        with self.assertRaises(RepositoryError):
 
            last = len(self.repo.revisions)
 
            list(self.repo.get_changesets(start=last-1, end=0))
 

	
 

	
 
class _ChangesetsChangesTestCaseMixin(_BackendTestMixin):
 
    recreate_repo_per_test = False
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        return [
 
            {
 
                'message': u'Initial',
 
                'author': u'Joe Doe <joe.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 20),
 
                'added': [
 
                    FileNode('foo/bar', content='foo'),
 
                    FileNode('foo/bał', content='foo'),
 
                    FileNode('foobar', content='foo'),
 
                    FileNode('qwe', content='foo'),
 
                ],
 
            },
 
            {
 
                'message': u'Massive changes',
 
                'author': u'Joe Doe <joe.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 22),
 
                'added': [FileNode('fallout', content='War never changes')],
 
                'changed': [
 
                    FileNode('foo/bar', content='baz'),
 
                    FileNode('foobar', content='baz'),
 
                ],
 
                'removed': [FileNode('qwe')],
 
            },
 
        ]
 

	
 
    def test_initial_commit(self):
 
        changeset = self.repo.get_changeset(0)
 
        self.assertItemsEqual(changeset.added, [
 
            changeset.get_node('foo/bar'),
 
            changeset.get_node('foo/bał'),
 
            changeset.get_node('foobar'),
 
            changeset.get_node('qwe'),
 
        ])
 
        self.assertItemsEqual(changeset.changed, [])
 
        self.assertItemsEqual(changeset.removed, [])
 

	
 
    def test_head_added(self):
 
        changeset = self.repo.get_changeset()
 
        self.assertTrue(isinstance(changeset.added, AddedFileNodesGenerator))
 
        self.assertItemsEqual(changeset.added, [
 
            changeset.get_node('fallout'),
 
        ])
 
        self.assertTrue(isinstance(changeset.changed, ChangedFileNodesGenerator))
 
        self.assertItemsEqual(changeset.changed, [
 
            changeset.get_node('foo/bar'),
 
            changeset.get_node('foobar'),
 
        ])
 
        self.assertTrue(isinstance(changeset.removed, RemovedFileNodesGenerator))
 
        self.assertEqual(len(changeset.removed), 1)
 
        self.assertEqual(list(changeset.removed)[0].path, 'qwe')
 

	
 
    def test_get_filemode(self):
 
        changeset = self.repo.get_changeset()
 
        self.assertEqual(33188, changeset.get_file_mode('foo/bar'))
 

	
 
    def test_get_filemode_non_ascii(self):
 
        changeset = self.repo.get_changeset()
 
        self.assertEqual(33188, changeset.get_file_mode('foo/bał'))
 
        self.assertEqual(33188, changeset.get_file_mode(u'foo/bał'))
 

	
 
# For each backend create test case class
 
for alias in SCM_TESTS:
 
    attrs = {
 
        'backend_alias': alias,
 
    }
 
    # tests with additional commits
 
    cls_name = ''.join(('%s changesets with commits test' % alias).title().split())
 
    bases = (_ChangesetsWithCommitsTestCaseMixin, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
 

	
 
    # tests without additional commits
 
    cls_name = ''.join(('%s changesets test' % alias).title().split())
 
    bases = (_ChangesetsTestCaseMixin, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
 

	
 
    # tests changes
 
    cls_name = ''.join(('%s changesets changes test' % alias).title().split())
 
    bases = (_ChangesetsChangesTestCaseMixin, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
 

	
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_filenodes_unicode_path.py
 
# encoding: utf8
 

	
 
from __future__ import with_statement
 

	
 
import datetime
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.lib.vcs.utils.compat import unittest
 
from kallithea.tests.vcs.test_inmemchangesets import BackendBaseTestCase
 
from kallithea.tests.vcs.conf import SCM_TESTS
 

	
 

	
 
class FileNodeUnicodePathTestsMixin(object):
 

	
 
    fname = 'ąśðąęłąć.txt'
 
    ufname = fname.decode('utf-8')
 

	
 
    def get_commits(self):
 
        self.nodes = [
 
            FileNode(self.fname, content='Foobar'),
 
        ]
 

	
 
        commits = [
 
            {
 
                'message': 'Initial commit',
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 20),
 
                'added': self.nodes,
 
            },
 
        ]
 
        return commits
 

	
 
    def test_filenode_path(self):
 
        node = self.tip.get_node(self.fname)
 
        unode = self.tip.get_node(self.ufname)
 
        self.assertEqual(node, unode)
 

	
 

	
 
for alias in SCM_TESTS:
 
    attrs = {
 
        'backend_alias': alias,
 
    }
 
    cls_name = ''.join(('%s file node unicode path test' % alias).title()
 
        .split())
 
    bases = (FileNodeUnicodePathTestsMixin, BackendBaseTestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
 

	
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_getitem.py
 
from __future__ import with_statement
 

	
 
import datetime
 
from kallithea.tests.vcs.base import _BackendTestMixin
 
from kallithea.tests.vcs.conf import SCM_TESTS
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.lib.vcs.utils.compat import unittest
 

	
 

	
 
class GetitemTestCaseMixin(_BackendTestMixin):
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        start_date = datetime.datetime(2010, 1, 1, 20)
 
        for x in xrange(5):
 
            yield {
 
                'message': 'Commit %d' % x,
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': start_date + datetime.timedelta(hours=12 * x),
 
                'added': [
 
                    FileNode('file_%d.txt' % x, content='Foobar %d' % x),
 
                ],
 
            }
 

	
 
    def test__getitem__last_item_is_tip(self):
 
        self.assertEqual(self.repo[-1], self.repo.get_changeset())
 

	
 
    def test__getitem__returns_correct_items(self):
 
        changesets = [self.repo[x] for x in xrange(len(self.repo.revisions))]
 
        self.assertEqual(changesets, list(self.repo.get_changesets()))
 

	
 

	
 
# For each backend create test case class
 
for alias in SCM_TESTS:
 
    attrs = {
 
        'backend_alias': alias,
 
    }
 
    cls_name = ''.join(('%s getitem test' % alias).title().split())
 
    bases = (GetitemTestCaseMixin, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
 

	
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_getslice.py
 
from __future__ import with_statement
 

	
 
import datetime
 
from kallithea.tests.vcs.base import _BackendTestMixin
 
from kallithea.tests.vcs.conf import SCM_TESTS
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.lib.vcs.utils.compat import unittest
 

	
 

	
 
class GetsliceTestCaseMixin(_BackendTestMixin):
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        start_date = datetime.datetime(2010, 1, 1, 20)
 
        for x in xrange(5):
 
            yield {
 
                'message': 'Commit %d' % x,
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': start_date + datetime.timedelta(hours=12 * x),
 
                'added': [
 
                    FileNode('file_%d.txt' % x, content='Foobar %d' % x),
 
                ],
 
            }
 

	
 
    def test__getslice__last_item_is_tip(self):
 
        self.assertEqual(list(self.repo[-1:])[0], self.repo.get_changeset())
 

	
 
    def test__getslice__respects_start_index(self):
 
        self.assertEqual(list(self.repo[2:]),
 
            [self.repo.get_changeset(rev) for rev in self.repo.revisions[2:]])
 

	
 
    def test__getslice__respects_negative_start_index(self):
 
        self.assertEqual(list(self.repo[-2:]),
 
            [self.repo.get_changeset(rev) for rev in self.repo.revisions[-2:]])
 

	
 
    def test__getslice__respects_end_index(self):
 
        self.assertEqual(list(self.repo[:2]),
 
            [self.repo.get_changeset(rev) for rev in self.repo.revisions[:2]])
 

	
 
    def test__getslice__respects_negative_end_index(self):
 
        self.assertEqual(list(self.repo[:-2]),
 
            [self.repo.get_changeset(rev) for rev in self.repo.revisions[:-2]])
 

	
 

	
 
# For each backend create test case class
 
for alias in SCM_TESTS:
 
    attrs = {
 
        'backend_alias': alias,
 
    }
 
    cls_name = ''.join(('%s getslice test' % alias).title().split())
 
    bases = (GetsliceTestCaseMixin, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
 

	
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_git.py
 
from __future__ import with_statement
 

	
 
import os
 
import sys
 
import mock
 
import datetime
 
import urllib2
 
from kallithea.lib.vcs.backends.git import GitRepository, GitChangeset
 
from kallithea.lib.vcs.exceptions import RepositoryError, VCSError, NodeDoesNotExistError
 
from kallithea.lib.vcs.nodes import NodeKind, FileNode, DirNode, NodeState
 
from kallithea.lib.vcs.utils.compat import unittest
 
from kallithea.model.scm import ScmModel
 
from kallithea.tests.vcs.base import _BackendTestMixin
 
from kallithea.tests.vcs.conf import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
 

	
 

	
 
class GitRepositoryTest(unittest.TestCase):
 

	
 
    def __check_for_existing_repo(self):
 
        if os.path.exists(TEST_GIT_REPO_CLONE):
 
            self.fail('Cannot test git clone repo as location %s already '
 
                      'exists. You should manually remove it first.'
 
                      % TEST_GIT_REPO_CLONE)
 

	
 
    def setUp(self):
 
        self.repo = GitRepository(TEST_GIT_REPO)
 

	
 
    def test_wrong_repo_path(self):
 
        wrong_repo_path = '/tmp/errorrepo'
 
        self.assertRaises(RepositoryError, GitRepository, wrong_repo_path)
 

	
 
    def test_git_cmd_injection(self):
 
        repo_inject_path = TEST_GIT_REPO + '; echo "Cake";'
 
        with self.assertRaises(urllib2.URLError):
 
            # Should fail because URL will contain the parts after ; too
 
            urlerror_fail_repo = GitRepository(get_new_dir('injection-repo'), src_url=repo_inject_path, update_after_clone=True, create=True)
 

	
 
        with self.assertRaises(RepositoryError):
 
            # Should fail on direct clone call, which as of this writing does not happen outside of the class
 
            clone_fail_repo = GitRepository(get_new_dir('injection-repo'), create=True)
 
            clone_fail_repo.clone(repo_inject_path, update_after_clone=True,)
 

	
 
        # Verify correct quoting of evil characters that should work on posix file systems
 
        if sys.platform == 'win32':
 
            # windows does not allow '"' in dir names
 
            tricky_path = get_new_dir("tricky-path-repo-$'`")
 
        else:
 
            tricky_path = get_new_dir("tricky-path-repo-$'\"`")
 
        successfully_cloned = GitRepository(tricky_path, src_url=TEST_GIT_REPO, update_after_clone=True, create=True)
 
        # Repo should have been created
 
        self.assertFalse(successfully_cloned._repo.bare)
 

	
 
        if sys.platform == 'win32':
 
            # windows does not allow '"' in dir names
 
            tricky_path_2 = get_new_dir("tricky-path-2-repo-$'`")
 
        else:
 
            tricky_path_2 = get_new_dir("tricky-path-2-repo-$'\"`")
 
        successfully_cloned2 = GitRepository(tricky_path_2, src_url=tricky_path, bare=True, create=True)
 
        # Repo should have been created and thus used correct quoting for clone
 
        self.assertTrue(successfully_cloned2._repo.bare)
 

	
 
        # Should pass because URL has been properly quoted
 
        successfully_cloned.pull(tricky_path_2)
 
        successfully_cloned2.fetch(tricky_path)
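        # Why the quoting matters, as a sketch: a clone command built by
        # naive interpolation,
        #     'git clone %s %s' % (repo_inject_path, dest)
        # would reach the shell as
        #     git clone .../TEST_GIT_REPO; echo "Cake"; dest
        # running the injected `echo`; with proper quoting the whole path
        # stays a single argument.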
 

	
 
    def test_repo_create_with_spaces_in_path(self):
 
        repo_path = get_new_dir("path with spaces")
 
        repo = GitRepository(repo_path, src_url=None, bare=True, create=True)
 
        # Repo should have been created
 
        self.assertTrue(repo._repo.bare)
 

	
 
    def test_repo_clone(self):
 
        self.__check_for_existing_repo()
 
        repo = GitRepository(TEST_GIT_REPO)
 
        repo_clone = GitRepository(TEST_GIT_REPO_CLONE,
 
            src_url=TEST_GIT_REPO, create=True, update_after_clone=True)
 
        self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
 
        # Checking hashes of changesets should be enough
 
        for changeset in repo.get_changesets():
 
            raw_id = changeset.raw_id
 
            self.assertEqual(raw_id, repo_clone.get_changeset(raw_id).raw_id)
 

	
 
    def test_repo_clone_with_spaces_in_path(self):
 
        repo_path = get_new_dir("path with spaces")
 
        successfully_cloned = GitRepository(repo_path, src_url=TEST_GIT_REPO, update_after_clone=True, create=True)
 
        # Repo should have been created
 
        self.assertFalse(successfully_cloned._repo.bare)
 

	
 
        successfully_cloned.pull(TEST_GIT_REPO)
 
        self.repo.fetch(repo_path)
 

	
 
    def test_repo_clone_without_create(self):
 
        self.assertRaises(RepositoryError, GitRepository,
 
            TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
 

	
 
    def test_repo_clone_with_update(self):
 
        repo = GitRepository(TEST_GIT_REPO)
 
        clone_path = TEST_GIT_REPO_CLONE + '_with_update'
 
        repo_clone = GitRepository(clone_path,
 
            create=True, src_url=TEST_GIT_REPO, update_after_clone=True)
 
        self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
 

	
 
        #check if current workdir was updated
 
        fpath = os.path.join(clone_path, 'MANIFEST.in')
 
        self.assertEqual(True, os.path.isfile(fpath),
 
            'Repo was cloned and updated but file %s could not be found'
 
            % fpath)
 

	
 
    def test_repo_clone_without_update(self):
 
        repo = GitRepository(TEST_GIT_REPO)
 
        clone_path = TEST_GIT_REPO_CLONE + '_without_update'
 
        repo_clone = GitRepository(clone_path,
 
            create=True, src_url=TEST_GIT_REPO, update_after_clone=False)
 
        self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
 
        #check if current workdir was *NOT* updated
 
        fpath = os.path.join(clone_path, 'MANIFEST.in')
 
        # Make sure it's not bare repo
 
        self.assertFalse(repo_clone._repo.bare)
 
        self.assertEqual(False, os.path.isfile(fpath),
 
            'Repo was cloned without update but file %s was found'
 
            % fpath)
 

	
 
    def test_repo_clone_into_bare_repo(self):
 
        repo = GitRepository(TEST_GIT_REPO)
 
        clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
 
        repo_clone = GitRepository(clone_path, create=True,
 
            src_url=repo.path, bare=True)
 
        self.assertTrue(repo_clone._repo.bare)
 

	
 
    def test_create_repo_is_not_bare_by_default(self):
 
        repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
 
        self.assertFalse(repo._repo.bare)
 

	
 
    def test_create_bare_repo(self):
 
        repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
 
        self.assertTrue(repo._repo.bare)
 

	
 
    def test_revisions(self):
 
        # there are 112 revisions (by now)
 
        # so we can assume they would be available from now on
 
        subset = set([
 
            'c1214f7e79e02fc37156ff215cd71275450cffc3',
 
            '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
 
            'fa6600f6848800641328adbf7811fd2372c02ab2',
 
            '102607b09cdd60e2793929c4f90478be29f85a17',
 
            '49d3fd156b6f7db46313fac355dca1a0b94a0017',
 
            '2d1028c054665b962fa3d307adfc923ddd528038',
 
            'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
 
            'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
 
            'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
 
            '8430a588b43b5d6da365400117c89400326e7992',
 
            'd955cd312c17b02143c04fa1099a352b04368118',
 
            'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
 
            'add63e382e4aabc9e1afdc4bdc24506c269b7618',
 
            'f298fe1189f1b69779a4423f40b48edf92a703fc',
 
            'bd9b619eb41994cac43d67cf4ccc8399c1125808',
 
            '6e125e7c890379446e98980d8ed60fba87d0f6d1',
 
            'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
 
            '0b05e4ed56c802098dfc813cbe779b2f49e92500',
 
            '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
 
            '45223f8f114c64bf4d6f853e3c35a369a6305520',
 
            'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
 
            'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
 
            '27d48942240f5b91dfda77accd2caac94708cc7d',
 
            '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
 
            'e686b958768ee96af8029fe19c6050b1a8dd3b2b'])
 
        self.assertTrue(subset.issubset(set(self.repo.revisions)))
 

	
 

	
 

	
 
    def test_slicing(self):
 
        # expected slice sizes: 4, 1, 5, 10, 95
 
        for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
 
                                 (10, 20, 10), (5, 100, 95)]:
 
            revs = list(self.repo[sfrom:sto])
 
            self.assertEqual(len(revs), size)
 
            self.assertEqual(revs[0], self.repo.get_changeset(sfrom))
 
            self.assertEqual(revs[-1], self.repo.get_changeset(sto - 1))
 

	
 

	
 
    def test_branches(self):
 
        # TODO: Need more tests here
 
        # Removed (those are 'remotes' branches for cloned repo)
 
        #self.assertTrue('master' in self.repo.branches)
 
        #self.assertTrue('gittree' in self.repo.branches)
 
        #self.assertTrue('web-branch' in self.repo.branches)
 
        for name, id in self.repo.branches.items():
 
            self.assertTrue(isinstance(
 
                self.repo.get_changeset(id), GitChangeset))
 

	
 
    def test_tags(self):
 
        # TODO: Need more tests here
 
        self.assertTrue('v0.1.1' in self.repo.tags)
 
        self.assertTrue('v0.1.2' in self.repo.tags)
 
        for name, id in self.repo.tags.items():
 
            self.assertTrue(isinstance(
 
                self.repo.get_changeset(id), GitChangeset))
 

	
 
    def _test_single_changeset_cache(self, revision):
 
        chset = self.repo.get_changeset(revision)
 
        self.assertTrue(revision in self.repo.changesets)
 
        self.assertTrue(chset is self.repo.changesets[revision])
 

	
 
    def test_initial_changeset(self):
 
        id = self.repo.revisions[0]
 
        init_chset = self.repo.get_changeset(id)
 
        self.assertEqual(init_chset.message, 'initial import\n')
 
        self.assertEqual(init_chset.author,
 
            'Marcin Kuzminski <marcin@python-blog.com>')
 
        for path in ('vcs/__init__.py',
 
                     'vcs/backends/BaseRepository.py',
 
                     'vcs/backends/__init__.py'):
 
            self.assertTrue(isinstance(init_chset.get_node(path), FileNode))
 
        for path in ('', 'vcs', 'vcs/backends'):
 
            self.assertTrue(isinstance(init_chset.get_node(path), DirNode))
 

	
 
        self.assertRaises(NodeDoesNotExistError, init_chset.get_node, path='foobar')
 

	
 
        node = init_chset.get_node('vcs/')
 
        self.assertTrue(hasattr(node, 'kind'))
 
        self.assertEqual(node.kind, NodeKind.DIR)
 

	
 
        node = init_chset.get_node('vcs')
 
        self.assertTrue(hasattr(node, 'kind'))
 
        self.assertEqual(node.kind, NodeKind.DIR)
 

	
 
        node = init_chset.get_node('vcs/__init__.py')
 
        self.assertTrue(hasattr(node, 'kind'))
 
        self.assertEqual(node.kind, NodeKind.FILE)
 

	
 
    def test_not_existing_changeset(self):
 
        self.assertRaises(RepositoryError, self.repo.get_changeset,
 
            'f' * 40)
 

	
 
    def test_changeset10(self):
 

	
 
        chset10 = self.repo.get_changeset(self.repo.revisions[9])
 
        README = """===
 
VCS
 
===
 

	
 
Various Version Control System management abstraction layer for Python.
 

	
 
Introduction
 
------------
 

	
 
TODO: To be written...
 

	
 
"""
 
        node = chset10.get_node('README.rst')
 
        self.assertEqual(node.kind, NodeKind.FILE)
 
        self.assertEqual(node.content, README)
 

	
 

	
 
class GitChangesetTest(unittest.TestCase):
 

	
 
    def setUp(self):
 
        self.repo = GitRepository(TEST_GIT_REPO)
 

	
 
    def test_default_changeset(self):
 
        tip = self.repo.get_changeset()
 
        self.assertEqual(tip, self.repo.get_changeset(None))
 
        self.assertEqual(tip, self.repo.get_changeset('tip'))
 

	
 
    def test_root_node(self):
 
        tip = self.repo.get_changeset()
 
        self.assertTrue(tip.root is tip.get_node(''))
 

	
 
    def test_lazy_fetch(self):
 
        """
 
        Test that a changeset's nodes expand and are cached as we walk through
 
        the revision. This test is somewhat hard to write, as the order of
 
        operations is key here. Written by running command after command in a shell.
 
        """
 
        hex = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
 
        self.assertTrue(hex in self.repo.revisions)
 
        chset = self.repo.get_changeset(hex)
 
        self.assertTrue(len(chset.nodes) == 0)
 
        root = chset.root
 
        self.assertTrue(len(chset.nodes) == 1)
 
        self.assertTrue(len(root.nodes) == 8)
 
        # accessing root.nodes updates chset.nodes
 
        self.assertTrue(len(chset.nodes) == 9)
 

	
 
        docs = root.get_node('docs')
 
        # we haven't yet accessed anything new as docs dir was already cached
 
        self.assertTrue(len(chset.nodes) == 9)
 
        self.assertTrue(len(docs.nodes) == 8)
 
        # accessing docs.nodes updates chset.nodes
 
        self.assertTrue(len(chset.nodes) == 17)
 

	
 
        self.assertTrue(docs is chset.get_node('docs'))
 
        self.assertTrue(docs is root.nodes[0])
 
        self.assertTrue(docs is root.dirs[0])
 
        self.assertTrue(docs is chset.get_node('docs'))
 

	
 
    def test_nodes_with_changeset(self):
 
        hex = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
 
        chset = self.repo.get_changeset(hex)
 
        root = chset.root
 
        docs = root.get_node('docs')
 
        self.assertTrue(docs is chset.get_node('docs'))
 
        api = docs.get_node('api')
 
        self.assertTrue(api is chset.get_node('docs/api'))
 
        index = api.get_node('index.rst')
 
        self.assertTrue(index is chset.get_node('docs/api/index.rst'))
 
        self.assertTrue(index is chset.get_node('docs')\
 
            .get_node('api')\
 
            .get_node('index.rst'))
 

	
 
    def test_branch_and_tags(self):
 
        """
 
        rev0 = self.repo.revisions[0]
 
        chset0 = self.repo.get_changeset(rev0)
 
        self.assertEqual(chset0.branch, 'master')
 
        self.assertEqual(chset0.tags, [])
 

	
 
        rev10 = self.repo.revisions[10]
 
        chset10 = self.repo.get_changeset(rev10)
 
        self.assertEqual(chset10.branch, 'master')
 
        self.assertEqual(chset10.tags, [])
 

	
 
        rev44 = self.repo.revisions[44]
 
        chset44 = self.repo.get_changeset(rev44)
 
        self.assertEqual(chset44.branch, 'web-branch')
 

	
 
        tip = self.repo.get_changeset('tip')
 
        self.assertTrue('tip' in tip.tags)
 
        """
 
        # These tests would fail - branches are going to be changed
 
        # in the main API in order to support the git backend
 
        pass
 

	
 
    def _test_slices(self, limit, offset):
 
        count = self.repo.count()
 
        changesets = self.repo.get_changesets(limit=limit, offset=offset)
 
        idx = 0
 
        for changeset in changesets:
 
            rev = offset + idx
 
            idx += 1
 
            rev_id = self.repo.revisions[rev]
 
            if idx > limit:
 
                self.fail("Exceeded limit already (getting revision %s, "
 
                    "there are %s total revisions, offset=%s, limit=%s)"
 
                    % (rev_id, count, offset, limit))
 
            self.assertEqual(changeset, self.repo.get_changeset(rev_id))
 
        result = list(self.repo.get_changesets(limit=limit, offset=offset))
 
        start = offset
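        # old and/or idiom: offset + limit when a limit is given, else None
        # (i.e. no upper bound for the slice)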
 
        end = limit and offset + limit or None
 
        sliced = list(self.repo[start:end])
 
        self.assertEqual(result, sliced,
 
            msg="Comparison failed for limit=%s, offset=%s "
 
            "(get_changesets returned: %s and sliced: %s)"
 
            % (limit, offset, result, sliced))
 

	
 
    def _test_file_size(self, revision, path, size):
 
        node = self.repo.get_changeset(revision).get_node(path)
 
        self.assertTrue(node.is_file())
 
        self.assertEqual(node.size, size)
 

	
 
    def test_file_size(self):
 
        to_check = (
 
            ('c1214f7e79e02fc37156ff215cd71275450cffc3',
 
                'vcs/backends/BaseRepository.py', 502),
 
            ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
 
                'vcs/backends/hg.py', 854),
 
            ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
 
                'setup.py', 1068),
 

	
 
            ('d955cd312c17b02143c04fa1099a352b04368118',
 
                'vcs/backends/base.py', 2921),
 
            ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
 
                'vcs/backends/base.py', 3936),
 
            ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
 
                'vcs/backends/base.py', 6189),
 
        )
 
        for revision, path, size in to_check:
 
            self._test_file_size(revision, path, size)
 

	
 
    def test_file_history(self):
 
        # we can only check if those revisions are present in the history
 
        # as we cannot update this test every time the file is changed
 
        files = {
 
            'setup.py': [
 
                '54386793436c938cff89326944d4c2702340037d',
 
                '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
 
                '998ed409c795fec2012b1c0ca054d99888b22090',
 
                '5e0eb4c47f56564395f76333f319d26c79e2fb09',
 
                '0115510b70c7229dbc5dc49036b32e7d91d23acd',
 
                '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
 
                '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
 
                '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
 
                'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
 
            ],
 
            'vcs/nodes.py': [
 
                '33fa3223355104431402a888fa77a4e9956feb3e',
 
                'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
 
                'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
 
                'ab5721ca0a081f26bf43d9051e615af2cc99952f',
 
                'c877b68d18e792a66b7f4c529ea02c8f80801542',
 
                '4313566d2e417cb382948f8d9d7c765330356054',
 
                '6c2303a793671e807d1cfc70134c9ca0767d98c2',
 
                '54386793436c938cff89326944d4c2702340037d',
 
                '54000345d2e78b03a99d561399e8e548de3f3203',
 
                '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
 
                '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
 
                '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
 
                '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
 
                'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
 
                '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
 
                '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
 
                '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
 
                '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
 
                'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
 
                'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
 
                'f15c21f97864b4f071cddfbf2750ec2e23859414',
 
                'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
 
                'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
 
                '84dec09632a4458f79f50ddbbd155506c460b4f9',
 
                '0115510b70c7229dbc5dc49036b32e7d91d23acd',
 
                '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
 
                '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
 
                'b8d04012574729d2c29886e53b1a43ef16dd00a1',
 
                '6970b057cffe4aab0a792aa634c89f4bebf01441',
 
                'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
 
                'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
 
            ],
 
            'vcs/backends/git.py': [
 
                '4cf116ad5a457530381135e2f4c453e68a1b0105',
 
                '9a751d84d8e9408e736329767387f41b36935153',
 
                'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
 
                '428f81bb652bcba8d631bce926e8834ff49bdcc6',
 
                '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
 
                '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
 
                '50e08c506174d8645a4bb517dd122ac946a0f3bf',
 
                '54000345d2e78b03a99d561399e8e548de3f3203',
 
            ],
 
        }
 
        for path, revs in files.items():
 
            node = self.repo.get_changeset(revs[0]).get_node(path)
 
            node_revs = [chset.raw_id for chset in node.history]
 
            self.assertTrue(set(revs).issubset(set(node_revs)),
 
                "We assumed that %s is subset of revisions for which file %s "
 
                "has been changed, and history of that node returned: %s"
 
                % (revs, path, node_revs))
 

	
 
    def test_file_annotate(self):
 
        files = {
 
            'vcs/backends/__init__.py': {
 
                'c1214f7e79e02fc37156ff215cd71275450cffc3': {
 
                    'lines_no': 1,
 
                    'changesets': [
 
                        'c1214f7e79e02fc37156ff215cd71275450cffc3',
 
                    ],
 
                },
 
                '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
 
                    'lines_no': 21,
 
                    'changesets': [
 
                        '49d3fd156b6f7db46313fac355dca1a0b94a0017',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                    ],
 
                },
 
                'e29b67bd158580fc90fc5e9111240b90e6e86064': {
 
                    'lines_no': 32,
 
                    'changesets': [
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '54000345d2e78b03a99d561399e8e548de3f3203',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '78c3f0c23b7ee935ec276acb8b8212444c33c396',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '78c3f0c23b7ee935ec276acb8b8212444c33c396',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '992f38217b979d0b0987d0bae3cc26dac85d9b19',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                        '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
 
                    ],
 
                },
 
            },
 
        }
 

	
 
        for fname, revision_dict in files.items():
 
            for rev, data in revision_dict.items():
 
                cs = self.repo.get_changeset(rev)
 

	
 
                l1_1 = [x[1] for x in cs.get_file_annotate(fname)]
 
                l1_2 = [x[2]().raw_id for x in cs.get_file_annotate(fname)]
 
                self.assertEqual(l1_1, l1_2)
 
                l1 = l1_1
 
                l2 = files[fname][rev]['changesets']
 
                self.assertTrue(l1 == l2 , "The lists of revision for %s@rev %s"
 
                                "from annotation list should match each other, "
 
                                "got \n%s \nvs \n%s " % (fname, rev, l1, l2))
 

	
 
    def test_files_state(self):
 
        """
 
        Tests state of FileNodes.
 
        """
 
        node = self.repo\
 
            .get_changeset('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
 
            .get_node('vcs/utils/diffs.py')
 
        self.assertEqual(node.state, NodeState.ADDED)
 
        self.assertTrue(node.added)
 
        self.assertFalse(node.changed)
 
        self.assertFalse(node.not_changed)
 
        self.assertFalse(node.removed)
 

	
 
        node = self.repo\
 
            .get_changeset('33fa3223355104431402a888fa77a4e9956feb3e')\
 
            .get_node('.hgignore')
 
        self.assertEqual(node.state, NodeState.CHANGED)
 
        self.assertFalse(node.added)
 
        self.assertTrue(node.changed)
 
        self.assertFalse(node.not_changed)
 
        self.assertFalse(node.removed)
 

	
 
        node = self.repo\
 
            .get_changeset('e29b67bd158580fc90fc5e9111240b90e6e86064')\
 
            .get_node('setup.py')
 
        self.assertEqual(node.state, NodeState.NOT_CHANGED)
 
        self.assertFalse(node.added)
 
        self.assertFalse(node.changed)
 
        self.assertTrue(node.not_changed)
 
        self.assertFalse(node.removed)
 

	
 
        # If the node has REMOVED state, trying to fetch it raises
        # NodeDoesNotExistError
 
        chset = self.repo.get_changeset(
 
            'fa6600f6848800641328adbf7811fd2372c02ab2')
 
        path = 'vcs/backends/BaseRepository.py'
 
        self.assertRaises(NodeDoesNotExistError, chset.get_node, path)
 
        # but it will be listed in the changeset's ``removed`` attribute
        self.assertIn(path, [rf.path for rf in chset.removed])
 

	
 
        chset = self.repo.get_changeset(
 
            '54386793436c938cff89326944d4c2702340037d')
 
        changed = ['setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
 
            'vcs/nodes.py']
 
        self.assertEqual(set(changed), set([f.path for f in chset.changed]))
 

	
 
    def test_commit_message_is_unicode(self):
 
        for cs in self.repo:
 
            self.assertEqual(type(cs.message), unicode)
 

	
 
    def test_changeset_author_is_unicode(self):
 
        for cs in self.repo:
 
            self.assertEqual(type(cs.author), unicode)
 

	
 
    def test_repo_files_content_is_unicode(self):
 
        changeset = self.repo.get_changeset()
 
        for node in changeset.get_node('/'):
 
            if node.is_file():
 
                self.assertEqual(type(node.content), unicode)
 

	
 
    def test_wrong_path(self):
 
        # There is a 'setup.py' in the root dir, but not at this path:
 
        path = 'foo/bar/setup.py'
 
        tip = self.repo.get_changeset()
 
        self.assertRaises(VCSError, tip.get_node, path)
 

	
 
    def test_author_email(self):
 
        self.assertEqual('marcin@python-blog.com',
 
          self.repo.get_changeset('c1214f7e79e02fc37156ff215cd71275450cffc3')\
 
          .author_email)
 
        self.assertEqual('lukasz.balcerzak@python-center.pl',
 
          self.repo.get_changeset('ff7ca51e58c505fec0dd2491de52c622bb7a806b')\
 
          .author_email)
 
        self.assertEqual('none@none',
 
          self.repo.get_changeset('8430a588b43b5d6da365400117c89400326e7992')\
 
          .author_email)
 

	
 
    def test_author_username(self):
 
        self.assertEqual('Marcin Kuzminski',
 
          self.repo.get_changeset('c1214f7e79e02fc37156ff215cd71275450cffc3')\
 
          .author_name)
 
        self.assertEqual('Lukasz Balcerzak',
 
          self.repo.get_changeset('ff7ca51e58c505fec0dd2491de52c622bb7a806b')\
 
          .author_name)
 
        self.assertEqual('marcink',
 
          self.repo.get_changeset('8430a588b43b5d6da365400117c89400326e7992')\
 
          .author_name)
 

	
 

	
 
class GitSpecificTest(unittest.TestCase):
 

	
 
    def test_error_is_raised_for_added_if_diff_name_status_is_wrong(self):
 
        repo = mock.MagicMock()
 
        changeset = GitChangeset(repo, 'foobar')
 
        changeset._diff_name_status = 'foobar'
 
        with self.assertRaises(VCSError):
 
            changeset.added
 

	
 
    def test_error_is_raised_for_changed_if_diff_name_status_is_wrong(self):
 
        repo = mock.MagicMock()
 
        changeset = GitChangeset(repo, 'foobar')
 
        changeset._diff_name_status = 'foobar'
 
        with self.assertRaises(VCSError):
 
            changeset.changed
 

	
 
    def test_error_is_raised_for_removed_if_diff_name_status_is_wrong(self):
 
        repo = mock.MagicMock()
 
        changeset = GitChangeset(repo, 'foobar')
 
        changeset._diff_name_status = 'foobar'
 
        with self.assertRaises(VCSError):
 
            changeset.removed
 

	
 

	
 
class GitSpecificWithRepoTest(_BackendTestMixin, unittest.TestCase):
 
    backend_alias = 'git'
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        return [
 
            {
 
                'message': 'Initial',
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 20),
 
                'added': [
 
                    FileNode('foobar/static/js/admin/base.js', content='base'),
 
                    FileNode('foobar/static/admin', content='admin',
 
                        mode=0120000), # this is a link
 
                    FileNode('foo', content='foo'),
 
                ],
 
            },
 
            {
 
                'message': 'Second',
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 22),
 
                'added': [
 
                    FileNode('foo2', content='foo2'),
 
                ],
 
            },
 
        ]
 

	
 
    def test_paths_slow_traversing(self):
 
        cs = self.repo.get_changeset()
 
        self.assertEqual(cs.get_node('foobar').get_node('static').get_node('js')
 
            .get_node('admin').get_node('base.js').content, 'base')
 

	
 
    def test_paths_fast_traversing(self):
 
        cs = self.repo.get_changeset()
 
        self.assertEqual(cs.get_node('foobar/static/js/admin/base.js').content,
 
            'base')
 

	
 
    def test_workdir_get_branch(self):
 
        self.repo.run_git_command(['checkout', '-b', 'production'])
 
        # Regression test: either of the following would fail if we didn't
        # check the .git/HEAD file
 
        self.repo.run_git_command(['checkout', 'production'])
 
        self.assertEqual(self.repo.workdir.get_branch(), 'production')
 
        self.repo.run_git_command(['checkout', 'master'])
 
        self.assertEqual(self.repo.workdir.get_branch(), 'master')
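    # Hedged sketch (not part of the original code): the regression guarded
    # against above stems from branch detection having to read .git/HEAD
    # directly, since a freshly checked-out branch may be recorded only
    # there. Roughly, assuming a hypothetical ``repo_path``:
    #
    #     with open(os.path.join(repo_path, '.git', 'HEAD')) as f:
    #         ref = f.read().strip()  # e.g. 'ref: refs/heads/production'
    #     branch = ref.rsplit('/', 1)[-1] if ref.startswith('ref:') else None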
 

	
 
    def test_get_diff_runs_git_command_with_hashes(self):
 
        self.repo.run_git_command = mock.Mock(return_value=['', ''])
 
        self.repo.get_diff(0, 1)
 
        self.repo.run_git_command.assert_called_once_with(
 
            ['diff', '-U3', '--full-index', '--binary', '-p', '-M', '--abbrev=40',
 
             self.repo._get_revision(0), self.repo._get_revision(1)])
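    # Note (inferred, not from the original file): run_git_command evidently
    # returns a (stdout, stderr) pair, which is why the mock above is primed
    # with return_value=['', ''].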
 

	
 
    def test_get_diff_runs_git_command_with_str_hashes(self):
 
        self.repo.run_git_command = mock.Mock(return_value=['', ''])
 
        self.repo.get_diff(self.repo.EMPTY_CHANGESET, 1)
 
        self.repo.run_git_command.assert_called_once_with(
 
            ['show', '-U3', '--full-index', '--binary', '-p', '-M', '--abbrev=40',
 
             self.repo._get_revision(1)])
 

	
 
    def test_get_diff_runs_git_command_with_path_if_its_given(self):
 
        self.repo.run_git_command = mock.Mock(return_value=['', ''])
 
        self.repo.get_diff(0, 1, 'foo')
 
        self.repo.run_git_command.assert_called_once_with(
 
            ['diff', '-U3', '--full-index', '--binary', '-p', '-M', '--abbrev=40',
 
             self.repo._get_revision(0), self.repo._get_revision(1), '--', 'foo'])
 

	
 

	
 
class GitRegressionTest(_BackendTestMixin, unittest.TestCase):
 
    backend_alias = 'git'
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        return [
 
            {
 
                'message': 'Initial',
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 20),
 
                'added': [
 
                    FileNode('bot/__init__.py', content='base'),
 
                    FileNode('bot/templates/404.html', content='base'),
 
                    FileNode('bot/templates/500.html', content='base'),
 
                ],
 
            },
 
            {
 
                'message': 'Second',
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 22),
 
                'added': [
 
                    FileNode('bot/build/migrations/1.py', content='foo2'),
 
                    FileNode('bot/build/migrations/2.py', content='foo2'),
 
                    FileNode('bot/build/static/templates/f.html', content='foo2'),
 
                    FileNode('bot/build/static/templates/f1.html', content='foo2'),
 
                    FileNode('bot/build/templates/err.html', content='foo2'),
 
                    FileNode('bot/build/templates/err2.html', content='foo2'),
 
                ],
 
            },
 
        ]
 

	
 
    def test_similar_paths(self):
 
        cs = self.repo.get_changeset()
 
        paths = lambda *n: [x.path for x in n]
 
        self.assertEqual(paths(*cs.get_nodes('bot')), ['bot/build', 'bot/templates', 'bot/__init__.py'])
 
        self.assertEqual(paths(*cs.get_nodes('bot/build')), ['bot/build/migrations', 'bot/build/static', 'bot/build/templates'])
 
        self.assertEqual(paths(*cs.get_nodes('bot/build/static')), ['bot/build/static/templates'])
 
        # this get_nodes call below used to cause trouble (regression)
 
        self.assertEqual(paths(*cs.get_nodes('bot/build/static/templates')), ['bot/build/static/templates/f.html', 'bot/build/static/templates/f1.html'])
 
        self.assertEqual(paths(*cs.get_nodes('bot/build/templates')), ['bot/build/templates/err.html', 'bot/build/templates/err2.html'])
 
        self.assertEqual(paths(*cs.get_nodes('bot/templates/')), ['bot/templates/404.html', 'bot/templates/500.html'])
 

	
 

	
 
class GitHooksTest(unittest.TestCase):
 
    """
 
    Tests related to hook functionality of Git repositories.
 
    """
 

	
 
    def setUp(self):
 
        # For each run we want a fresh repo.
 
        self.repo_directory = get_new_dir("githookrepo")
 
        self.repo = GitRepository(self.repo_directory, create=True)
 

	
 
        # Create a dictionary where keys are hook names, and values are paths to
 
        # them. Deduplicates code in tests a bit.
 
        self.hook_directory = self.repo.get_hook_location()
 
        self.kallithea_hooks = {h: os.path.join(self.hook_directory, h) for h in ("pre-receive", "post-receive")}
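        # NB: dict comprehensions require Python 2.7; for Python 2.6 this
        # would need the equivalent
        # dict((h, os.path.join(self.hook_directory, h)) for h in (...))
        # spelling instead.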
 

	
 
    def test_hooks_created_if_missing(self):
 
        """
 
        Tests that hooks are installed in the repository if they are missing.
 
        """
 

	
 
        for hook, hook_path in self.kallithea_hooks.iteritems():
 
            if os.path.exists(hook_path):
 
                os.remove(hook_path)
 

	
 
        ScmModel().install_git_hooks(repo=self.repo)
 

	
 
        for hook, hook_path in self.kallithea_hooks.iteritems():
 
            self.assertTrue(os.path.exists(hook_path))
 

	
 
    def test_kallithea_hooks_updated(self):
 
        """
 
        Tests that hooks are updated if they are already Kallithea hooks.
 
        """
 

	
 
        for hook, hook_path in self.kallithea_hooks.iteritems():
 
            with open(hook_path, "w") as f:
 
                f.write("KALLITHEA_HOOK_VER=0.0.0\nJUST_BOGUS")
 

	
 
        ScmModel().install_git_hooks(repo=self.repo)
 

	
 
        for hook, hook_path in self.kallithea_hooks.iteritems():
 
            with open(hook_path) as f:
 
                self.assertNotIn("JUST_BOGUS", f.read())
 

	
 
    def test_custom_hooks_untouched(self):
 
        """
 
        Tests that hooks are left untouched if they are not Kallithea hooks.
 
        """
 

	
 
        for hook, hook_path in self.kallithea_hooks.iteritems():
 
            with open(hook_path, "w") as f:
 
                f.write("#!/bin/bash\n#CUSTOM_HOOK")
 

	
 
        ScmModel().install_git_hooks(repo=self.repo)
 

	
 
        for hook, hook_path in self.kallithea_hooks.iteritems():
 
            with open(hook_path) as f:
 
                self.assertIn("CUSTOM_HOOK", f.read())
 

	
 
    def test_custom_hooks_forced_update(self):
 
        """
 
        Tests that hooks are forcefully updated even though they are custom hooks.
 
        """
 

	
 
        for hook, hook_path in self.kallithea_hooks.iteritems():
 
            with open(hook_path, "w") as f:
 
                f.write("#!/bin/bash\n#CUSTOM_HOOK")
 

	
 
        ScmModel().install_git_hooks(repo=self.repo, force_create=True)
 

	
 
        for hook, hook_path in self.kallithea_hooks.iteritems():
 
            with open(hook_path) as f:
 
                self.assertIn("KALLITHEA_HOOK_VER", f.read())
 

	
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_hg.py
Show inline comments
 
from __future__ import with_statement
 

	
 
import os
 
from kallithea.lib.vcs.backends.hg import MercurialRepository, MercurialChangeset
 
from kallithea.lib.vcs.exceptions import RepositoryError, VCSError, NodeDoesNotExistError
 
from kallithea.lib.vcs.nodes import NodeKind, NodeState
 
from kallithea.tests.vcs.conf import TEST_HG_REPO, TEST_HG_REPO_CLONE, \
 
    TEST_HG_REPO_PULL
 
from kallithea.lib.vcs.utils.compat import unittest
 

	
 

	
 
# Use only a clean Mercurial ui
 
from kallithea.lib.vcs.utils.hgcompat import mercurial
 
mercurial.scmutil.rcpath()
 
if mercurial.scmutil._rcpath:
 
    mercurial.scmutil._rcpath = mercurial.scmutil._rcpath[:1]
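# Presumably rcpath() populates the scmutil._rcpath cache with every hgrc
# location on the system; trimming the cache to its first entry keeps the
# developer's personal ~/.hgrc from influencing the test run.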
 

	
 

	
 
class MercurialRepositoryTest(unittest.TestCase):
 

	
 
    def __check_for_existing_repo(self):
 
        if os.path.exists(TEST_HG_REPO_CLONE):
 
            self.fail('Cannot test mercurial clone repo as location %s already '
 
                      'exists. You should manually remove it first.'
 
                      % TEST_HG_REPO_CLONE)
 

	
 
    def setUp(self):
 
        self.repo = MercurialRepository(TEST_HG_REPO)
 

	
 
    def test_wrong_repo_path(self):
 
        wrong_repo_path = '/tmp/errorrepo'
 
        self.assertRaises(RepositoryError, MercurialRepository, wrong_repo_path)
 

	
 
    def test_unicode_path_repo(self):
 
        self.assertRaises(VCSError, lambda: MercurialRepository(u'iShouldFail'))
 

	
 
    def test_repo_clone(self):
 
        self.__check_for_existing_repo()
 
        repo = MercurialRepository(TEST_HG_REPO)
 
        repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
 
            src_url=TEST_HG_REPO, update_after_clone=True)
 
        self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
 
        # Checking hashes of changesets should be enough
 
        for changeset in repo.get_changesets():
 
            raw_id = changeset.raw_id
 
            self.assertEqual(raw_id, repo_clone.get_changeset(raw_id).raw_id)
 

	
 
    def test_repo_clone_with_update(self):
 
        repo = MercurialRepository(TEST_HG_REPO)
 
        repo_clone = MercurialRepository(TEST_HG_REPO_CLONE + '_w_update',
 
            src_url=TEST_HG_REPO, update_after_clone=True)
 
        self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
 

	
 
        # check if the current workdir was updated
        self.assertTrue(os.path.isfile(
            os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in')))
 

	
 
    def test_repo_clone_without_update(self):
 
        repo = MercurialRepository(TEST_HG_REPO)
 
        repo_clone = MercurialRepository(TEST_HG_REPO_CLONE + '_wo_update',
 
            src_url=TEST_HG_REPO, update_after_clone=False)
 
        self.assertEqual(len(repo.revisions), len(repo_clone.revisions))
 
        self.assertFalse(os.path.isfile(
            os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in')))
 

	
 
    def test_pull(self):
 
        if os.path.exists(TEST_HG_REPO_PULL):
 
            self.fail('Cannot test mercurial pull command as location %s '
 
                      'already exists. You should manually remove it first'
 
                      % TEST_HG_REPO_PULL)
 
        repo_new = MercurialRepository(TEST_HG_REPO_PULL, create=True)
 
        self.assertTrue(len(self.repo.revisions) > len(repo_new.revisions))
 

	
 
        repo_new.pull(self.repo.path)
 
        repo_new = MercurialRepository(TEST_HG_REPO_PULL)
 
        self.assertTrue(len(self.repo.revisions) == len(repo_new.revisions))
 

	
 
    def test_revisions(self):
 
        # there are 21 revisions at bitbucket now,
        # so we can assume they will remain available from now on
 
        subset = set(['b986218ba1c9b0d6a259fac9b050b1724ed8e545',
 
                 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
 
                 '6cba7170863a2411822803fa77a0a264f1310b35',
 
                 '56349e29c2af3ac913b28bde9a2c6154436e615b',
 
                 '2dda4e345facb0ccff1a191052dd1606dba6781d',
 
                 '6fff84722075f1607a30f436523403845f84cd9e',
 
                 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
 
                 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
 
                 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
 
                 'be90031137367893f1c406e0a8683010fd115b79',
 
                 'db8e58be770518cbb2b1cdfa69146e47cd481481',
 
                 '84478366594b424af694a6c784cb991a16b87c21',
 
                 '17f8e105dddb9f339600389c6dc7175d395a535c',
 
                 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
 
                 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
 
                 '786facd2c61deb9cf91e9534735124fb8fc11842',
 
                 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
 
                 'aa6a0de05b7612707db567078e130a6cd114a9a7',
 
                 'eada5a770da98ab0dd7325e29d00e0714f228d09'
 
                ])
 
        self.assertTrue(subset.issubset(set(self.repo.revisions)))
 

	
 

	
 
        # check if we have the proper order of revisions
 
        org = ['b986218ba1c9b0d6a259fac9b050b1724ed8e545',
 
                '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
 
                '6cba7170863a2411822803fa77a0a264f1310b35',
 
                '56349e29c2af3ac913b28bde9a2c6154436e615b',
 
                '2dda4e345facb0ccff1a191052dd1606dba6781d',
 
                '6fff84722075f1607a30f436523403845f84cd9e',
 
                '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
 
                '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
 
                'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
 
                'be90031137367893f1c406e0a8683010fd115b79',
 
                'db8e58be770518cbb2b1cdfa69146e47cd481481',
 
                '84478366594b424af694a6c784cb991a16b87c21',
 
                '17f8e105dddb9f339600389c6dc7175d395a535c',
 
                '20a662e756499bde3095ffc9bc0643d1def2d0eb',
 
                '2e319b85e70a707bba0beff866d9f9de032aa4f9',
 
                '786facd2c61deb9cf91e9534735124fb8fc11842',
 
                '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
 
                'aa6a0de05b7612707db567078e130a6cd114a9a7',
 
                'eada5a770da98ab0dd7325e29d00e0714f228d09',
 
                '2c1885c735575ca478bf9e17b0029dca68824458',
 
                'd9bcd465040bf869799b09ad732c04e0eea99fe9',
 
                '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
 
                '4fb8326d78e5120da2c7468dcf7098997be385da',
 
                '62b4a097164940bd66030c4db51687f3ec035eed',
 
                '536c1a19428381cfea92ac44985304f6a8049569',
 
                '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
 
                '9bb326a04ae5d98d437dece54be04f830cf1edd9',
 
                'f8940bcb890a98c4702319fbe36db75ea309b475',
 
                'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
 
                '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
 
                'ee87846a61c12153b51543bf860e1026c6d3dcba', ]
 
        self.assertEqual(org, self.repo.revisions[:31])
 

	
 
    def test_iter_slice(self):
 
        sliced = list(self.repo[:10])
 
        itered = list(self.repo)[:10]
 
        self.assertEqual(sliced, itered)
 

	
 
    def test_slicing(self):
 
        # expected slice sizes: 4, 1, 5, 10, 95
 
        for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
 
                                 (10, 20, 10), (5, 100, 95)]:
 
            revs = list(self.repo[sfrom:sto])
 
            self.assertEqual(len(revs), size)
 
            self.assertEqual(revs[0], self.repo.get_changeset(sfrom))
 
            self.assertEqual(revs[-1], self.repo.get_changeset(sto - 1))
 

	
 
    def test_branches(self):
 
        # TODO: Need more tests here
 

	
 
        #active branches
 
        self.assertTrue('default' in self.repo.branches)
 
        self.assertTrue('stable' in self.repo.branches)
 

	
 
        # closed
 
        self.assertTrue('git' in self.repo._get_branches(closed=True))
 
        self.assertTrue('web' in self.repo._get_branches(closed=True))
 

	
 
        for name, id in self.repo.branches.items():
 
            self.assertTrue(isinstance(
 
                self.repo.get_changeset(id), MercurialChangeset))
 

	
 
    def test_tip_in_tags(self):
 
        # tip is always a tag
 
        self.assertIn('tip', self.repo.tags)
 

	
 
    def test_tip_changeset_in_tags(self):
 
        tip = self.repo.get_changeset()
 
        self.assertEqual(self.repo.tags['tip'], tip.raw_id)
 

	
 
    def test_initial_changeset(self):
 

	
 
        init_chset = self.repo.get_changeset(0)
 
        self.assertEqual(init_chset.message, 'initial import')
 
        self.assertEqual(init_chset.author,
 
            'Marcin Kuzminski <marcin@python-blog.com>')
 
        self.assertEqual(sorted(init_chset._file_paths),
 
            sorted([
 
                'vcs/__init__.py',
 
                'vcs/backends/BaseRepository.py',
 
                'vcs/backends/__init__.py',
 
            ])
 
        )
 
        self.assertEqual(sorted(init_chset._dir_paths),
 
            sorted(['', 'vcs', 'vcs/backends']))
 

	
 
        self.assertRaises(NodeDoesNotExistError, init_chset.get_node, path='foobar')
 

	
 
        node = init_chset.get_node('vcs/')
 
        self.assertTrue(hasattr(node, 'kind'))
 
        self.assertEqual(node.kind, NodeKind.DIR)
 

	
 
        node = init_chset.get_node('vcs')
 
        self.assertTrue(hasattr(node, 'kind'))
 
        self.assertEqual(node.kind, NodeKind.DIR)
 

	
 
        node = init_chset.get_node('vcs/__init__.py')
 
        self.assertTrue(hasattr(node, 'kind'))
 
        self.assertEqual(node.kind, NodeKind.FILE)
 

	
 
    def test_not_existing_changeset(self):
 
        #rawid
 
        self.assertRaises(RepositoryError, self.repo.get_changeset,
 
            'abcd' * 10)
 
        #shortid
 
        self.assertRaises(RepositoryError, self.repo.get_changeset,
 
            'erro' * 4)
 
        #numeric
 
        self.assertRaises(RepositoryError, self.repo.get_changeset,
 
            self.repo.count() + 1)
 

	
 

	
 
        # Small chance we ever get to this one
 
        revision = pow(2, 30)
 
        self.assertRaises(RepositoryError, self.repo.get_changeset, revision)
 

	
 
    def test_changeset10(self):
 

	
 
        chset10 = self.repo.get_changeset(10)
 
        README = """===
 
VCS
 
===
 

	
 
Various Version Control System management abstraction layer for Python.
 

	
 
Introduction
 
------------
 

	
 
TODO: To be written...
 

	
 
"""
 
        node = chset10.get_node('README.rst')
 
        self.assertEqual(node.kind, NodeKind.FILE)
 
        self.assertEqual(node.content, README)
 

	
 

	
 
class MercurialChangesetTest(unittest.TestCase):
 

	
 
    def setUp(self):
 
        self.repo = MercurialRepository(TEST_HG_REPO)
 

	
 
    def _test_equality(self, changeset):
 
        revision = changeset.revision
 
        self.assertEqual(changeset, self.repo.get_changeset(revision))
 

	
 
    def test_equality(self):
 
 
        revs = [0, 10, 20]
 
        changesets = [self.repo.get_changeset(rev) for rev in revs]
 
        for changeset in changesets:
 
            self._test_equality(changeset)
 

	
 
    def test_default_changeset(self):
 
        tip = self.repo.get_changeset('tip')
 
        self.assertEqual(tip, self.repo.get_changeset())
 
        self.assertEqual(tip, self.repo.get_changeset(revision=None))
 
        self.assertEqual(tip, list(self.repo[-1:])[0])
 

	
 
    def test_root_node(self):
 
        tip = self.repo.get_changeset('tip')
 
        self.assertTrue(tip.root is tip.get_node(''))
 

	
 
    def test_lazy_fetch(self):
 
        """
 
        Test that a changeset's nodes expand and are cached as we walk through
        the revision. This test is somewhat hard to write, as the order of
        operations is key here. Written by running command after command in a shell.
 
        """
 
 
        chset = self.repo.get_changeset(45)
 
        self.assertTrue(len(chset.nodes) == 0)
 
        root = chset.root
 
        self.assertTrue(len(chset.nodes) == 1)
 
        self.assertTrue(len(root.nodes) == 8)
 
        # accessing root.nodes updates chset.nodes
 
        self.assertTrue(len(chset.nodes) == 9)
 

	
 
        docs = root.get_node('docs')
 
        # we haven't yet accessed anything new as docs dir was already cached
 
        self.assertTrue(len(chset.nodes) == 9)
 
        self.assertTrue(len(docs.nodes) == 8)
 
        # accessing docs.nodes updates chset.nodes
 
        self.assertTrue(len(chset.nodes) == 17)
 

	
 
        self.assertTrue(docs is chset.get_node('docs'))
 
        self.assertTrue(docs is root.nodes[0])
 
        self.assertTrue(docs is root.dirs[0])
 
        self.assertTrue(docs is chset.get_node('docs'))
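    # Hedged sketch (illustrative only, not the actual implementation): the
    # caching behaviour relied on above amounts to a per-changeset node cache,
    # roughly:
    #
    #     def get_node(self, path):
    #         if path not in self.nodes:
    #             self.nodes[path] = self._fetch_node(path)  # hypothetical helper
    #         return self.nodes[path]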
 

	
 
    def test_nodes_with_changeset(self):
 
 
        chset = self.repo.get_changeset(45)
 
        root = chset.root
 
        docs = root.get_node('docs')
 
        self.assertTrue(docs is chset.get_node('docs'))
 
        api = docs.get_node('api')
 
        self.assertTrue(api is chset.get_node('docs/api'))
 
        index = api.get_node('index.rst')
 
        self.assertTrue(index is chset.get_node('docs/api/index.rst'))
 
        self.assertTrue(index is chset.get_node('docs')\
 
            .get_node('api')\
 
            .get_node('index.rst'))
 

	
 
    def test_branch_and_tags(self):
 
        chset0 = self.repo.get_changeset(0)
 
        self.assertEqual(chset0.branch, 'default')
 
        self.assertEqual(chset0.tags, [])
 

	
 
        chset10 = self.repo.get_changeset(10)
 
        self.assertEqual(chset10.branch, 'default')
 
        self.assertEqual(chset10.tags, [])
 

	
 
        chset44 = self.repo.get_changeset(44)
 
        self.assertEqual(chset44.branch, 'web')
 

	
 
        tip = self.repo.get_changeset('tip')
 
        self.assertTrue('tip' in tip.tags)
 

	
 
    def _test_file_size(self, revision, path, size):
 
        node = self.repo.get_changeset(revision).get_node(path)
 
        self.assertTrue(node.is_file())
 
        self.assertEqual(node.size, size)
 

	
 
    def test_file_size(self):
 
        to_check = (
 
            (10, 'setup.py', 1068),
 
            (20, 'setup.py', 1106),
 
            (60, 'setup.py', 1074),
 

	
 
            (10, 'vcs/backends/base.py', 2921),
 
            (20, 'vcs/backends/base.py', 3936),
 
            (60, 'vcs/backends/base.py', 6189),
 
        )
 
        for revision, path, size in to_check:
 
            self._test_file_size(revision, path, size)
 

	
 
    def test_file_history(self):
 
        # we can only check that these revisions are present in the history,
        # as we cannot update this test every time the file is changed
 
        files = {
 
            'setup.py': [7, 18, 45, 46, 47, 69, 77],
 
            'vcs/nodes.py': [7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60,
 
                61, 73, 76],
 
            'vcs/backends/hg.py': [4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23,
 
                26, 27, 28, 30, 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47,
 
                48, 49, 53, 54, 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79,
 
                82],
 
        }
 
        for path, revs in files.items():
 
            tip = self.repo.get_changeset(revs[-1])
 
            node = tip.get_node(path)
 
            node_revs = [chset.revision for chset in node.history]
 
            self.assertTrue(set(revs).issubset(set(node_revs)),
 
                "We assumed that %s is subset of revisions for which file %s "
 
                "has been changed, and history of that node returned: %s"
 
                % (revs, path, node_revs))
 

	
 
    def test_file_annotate(self):
 
        files = {
 
                 'vcs/backends/__init__.py':
 
                  {89: {'lines_no': 31,
 
                        'changesets': [32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
 
                                       37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
 
                                       32, 32, 32, 32, 37, 32, 37, 37, 32,
 
                                       32, 32]},
 
                   20: {'lines_no': 1,
 
                        'changesets': [4]},
 
                   55: {'lines_no': 31,
 
                        'changesets': [32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
 
                                       37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
 
                                       32, 32, 32, 32, 37, 32, 37, 37, 32,
 
                                       32, 32]}},
 
                 'vcs/exceptions.py':
 
                 {89: {'lines_no': 18,
 
                       'changesets': [16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
 
                                      16, 16, 17, 16, 16, 18, 18, 18]},
 
                  20: {'lines_no': 18,
 
                       'changesets': [16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
 
                                      16, 16, 17, 16, 16, 18, 18, 18]},
 
                  55: {'lines_no': 18, 'changesets': [16, 16, 16, 16, 16, 16,
 
                                                      16, 16, 16, 16, 16, 16,
 
                                                      17, 16, 16, 18, 18, 18]}},
 
                 'MANIFEST.in': {89: {'lines_no': 5,
 
                                      'changesets': [7, 7, 7, 71, 71]},
 
                                 20: {'lines_no': 3,
 
                                      'changesets': [7, 7, 7]},
 
                                 55: {'lines_no': 3,
 
                                     'changesets': [7, 7, 7]}}}
 

	
 
        for fname, revision_dict in files.items():
 
            for rev, data in revision_dict.items():
 
                cs = self.repo.get_changeset(rev)
 
                l1_1 = [x[1] for x in cs.get_file_annotate(fname)]
 
                l1_2 = [x[2]().raw_id for x in cs.get_file_annotate(fname)]
 
                self.assertEqual(l1_1, l1_2)
 
                l1 = [x[2]().revision for x in cs.get_file_annotate(fname)]
                l2 = files[fname][rev]['changesets']
                self.assertEqual(l1, l2, "The lists of revisions for %s@rev %s "
                                "from annotation list should match each other, "
                                "got \n%s \nvs \n%s " % (fname, rev, l1, l2))
 

	
 
    def test_changeset_state(self):
 
        """
 
        Tests which files have been added/changed/removed at a particular revision
 
        """
 

	
 
        # rev 46ad32a4f974:
 
        # hg st --rev 46ad32a4f974
 
        #    changed: 13
 
        #    added:   20
 
        #    removed: 1
 
        changed = set(['.hgignore'
 
            , 'README.rst' , 'docs/conf.py' , 'docs/index.rst' , 'setup.py'
 
            , 'tests/test_hg.py' , 'tests/test_nodes.py' , 'vcs/__init__.py'
 
            , 'vcs/backends/__init__.py' , 'vcs/backends/base.py'
 
            , 'vcs/backends/hg.py' , 'vcs/nodes.py' , 'vcs/utils/__init__.py'])
 

	
 
        added = set(['docs/api/backends/hg.rst'
 
            , 'docs/api/backends/index.rst' , 'docs/api/index.rst'
 
            , 'docs/api/nodes.rst' , 'docs/api/web/index.rst'
 
            , 'docs/api/web/simplevcs.rst' , 'docs/installation.rst'
 
            , 'docs/quickstart.rst' , 'setup.cfg' , 'vcs/utils/baseui_config.py'
 
            , 'vcs/utils/web.py' , 'vcs/web/__init__.py' , 'vcs/web/exceptions.py'
 
            , 'vcs/web/simplevcs/__init__.py' , 'vcs/web/simplevcs/exceptions.py'
 
            , 'vcs/web/simplevcs/middleware.py' , 'vcs/web/simplevcs/models.py'
 
            , 'vcs/web/simplevcs/settings.py' , 'vcs/web/simplevcs/utils.py'
 
            , 'vcs/web/simplevcs/views.py'])
 

	
 
        removed = set(['docs/api.rst'])
 

	
 
        chset64 = self.repo.get_changeset('46ad32a4f974')
 
        self.assertEqual(set((node.path for node in chset64.added)), added)
 
        self.assertEqual(set((node.path for node in chset64.changed)), changed)
 
        self.assertEqual(set((node.path for node in chset64.removed)), removed)
 

	
 
        # rev b090f22d27d6:
 
        # hg st --rev b090f22d27d6
 
        #    changed: 1
        #    added:   0
        #    removed: 0
 
        chset88 = self.repo.get_changeset('b090f22d27d6')
 
        self.assertEqual(set((node.path for node in chset88.added)), set())
 
        self.assertEqual(set((node.path for node in chset88.changed)),
 
            set(['.hgignore']))
 
        self.assertEqual(set((node.path for node in chset88.removed)), set())
 
#
 
        # 85:
 
        #    added:   2 ['vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
 
        #    changed: 4 ['vcs/web/simplevcs/models.py', ...]
 
        #    removed: 1 ['vcs/utils/web.py']
 
        chset85 = self.repo.get_changeset(85)
 
        self.assertEqual(set((node.path for node in chset85.added)), set([
 
            'vcs/utils/diffs.py',
 
            'vcs/web/simplevcs/views/diffs.py']))
 
        self.assertEqual(set((node.path for node in chset85.changed)), set([
 
            'vcs/web/simplevcs/models.py',
 
            'vcs/web/simplevcs/utils.py',
 
            'vcs/web/simplevcs/views/__init__.py',
 
            'vcs/web/simplevcs/views/repository.py',
 
            ]))
 
        self.assertEqual(set((node.path for node in chset85.removed)),
 
            set(['vcs/utils/web.py']))
 

	
 

	
 
    def test_files_state(self):
 
        """
 
        Tests state of FileNodes.
 
        """
 
        chset = self.repo.get_changeset(85)
 
        node = chset.get_node('vcs/utils/diffs.py')
 
        self.assertEqual(node.state, NodeState.ADDED)
 
        self.assertTrue(node.added)
 
        self.assertFalse(node.changed)
 
        self.assertFalse(node.not_changed)
 
        self.assertFalse(node.removed)
 

	
 
        chset = self.repo.get_changeset(88)
 
        node = chset.get_node('.hgignore')
 
        self.assertEqual(node.state, NodeState.CHANGED)
 
        self.assertFalse(node.added)
 
        self.assertTrue(node.changed)
 
        self.assertFalse(node.not_changed)
 
        self.assertFalse(node.removed)
 

	
 
        chset = self.repo.get_changeset(85)
 
        node = chset.get_node('setup.py')
 
        self.assertEqual(node.state, NodeState.NOT_CHANGED)
 
        self.assertFalse(node.added)
 
        self.assertFalse(node.changed)
 
        self.assertTrue(node.not_changed)
 
        self.assertFalse(node.removed)
 

	
 
        # If the node has REMOVED state, trying to fetch it raises
        # NodeDoesNotExistError
 
        chset = self.repo.get_changeset(2)
 
        path = 'vcs/backends/BaseRepository.py'
 
        self.assertRaises(NodeDoesNotExistError, chset.get_node, path)
 
        # but it will be listed in the changeset's ``removed`` attribute
        self.assertIn(path, [rf.path for rf in chset.removed])
 

	
 
    def test_commit_message_is_unicode(self):
 
        for cm in self.repo:
 
            self.assertEqual(type(cm.message), unicode)
 

	
 
    def test_changeset_author_is_unicode(self):
 
        for cm in self.repo:
 
            self.assertEqual(type(cm.author), unicode)
 

	
 
    def test_repo_files_content_is_unicode(self):
 
        test_changeset = self.repo.get_changeset(100)
 
        for node in test_changeset.get_node('/'):
 
            if node.is_file():
 
                self.assertEqual(type(node.content), unicode)
 

	
 
    def test_wrong_path(self):
 
        # There is a 'setup.py' in the root dir, but not at this path:
 
        path = 'foo/bar/setup.py'
 
        self.assertRaises(VCSError, self.repo.get_changeset().get_node, path)
 

	
 

	
 
    def test_archival_file(self):
 
        #TODO:
 
        pass
 

	
 
    def test_archival_as_generator(self):
 
        #TODO:
 
        pass
 

	
 
    def test_archival_wrong_kind(self):
 
        tip = self.repo.get_changeset()
 
        self.assertRaises(VCSError, tip.fill_archive, kind='error')
 

	
 
    def test_archival_empty_prefix(self):
 
        #TODO:
 
        pass
 

	
 

	
 
    def test_author_email(self):
 
        self.assertEqual('marcin@python-blog.com',
 
                         self.repo.get_changeset('b986218ba1c9').author_email)
 
        self.assertEqual('lukasz.balcerzak@python-center.pl',
 
                         self.repo.get_changeset('3803844fdbd3').author_email)
 
        self.assertEqual('',
 
                         self.repo.get_changeset('84478366594b').author_email)
 

	
 
    def test_author_username(self):
 
        self.assertEqual('Marcin Kuzminski',
 
                         self.repo.get_changeset('b986218ba1c9').author_name)
 
        self.assertEqual('Lukasz Balcerzak',
 
                         self.repo.get_changeset('3803844fdbd3').author_name)
 
        self.assertEqual('marcink',
 
                         self.repo.get_changeset('84478366594b').author_name)
kallithea/tests/vcs/test_inmemchangesets.py
Show inline comments
 
# encoding: utf8
 
"""
 
Tests so called "in memory changesets" commit API of vcs.
 
"""
 
from __future__ import with_statement
 

	
 
import time
 
import datetime
 

	
 
from kallithea.lib import vcs
 
from kallithea.tests.vcs.conf import SCM_TESTS, get_new_dir
 
from kallithea.lib.vcs.exceptions import EmptyRepositoryError
 
from kallithea.lib.vcs.exceptions import NodeAlreadyAddedError
 
from kallithea.lib.vcs.exceptions import NodeAlreadyExistsError
 
from kallithea.lib.vcs.exceptions import NodeAlreadyRemovedError
 
from kallithea.lib.vcs.exceptions import NodeAlreadyChangedError
 
from kallithea.lib.vcs.exceptions import NodeDoesNotExistError
 
from kallithea.lib.vcs.exceptions import NodeNotChangedError
 
from kallithea.lib.vcs.nodes import DirNode
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.lib.vcs.utils.compat import unittest
 
from kallithea.lib.vcs.utils import safe_unicode
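
# The commit API under test, as one commented sketch (illustrative only; the
# real calls are exercised by the mixin below):
#
#     repo = vcs.get_backend('hg')(repo_path, create=True)
#     imc = repo.in_memory_changeset
#     imc.add(FileNode('hello.txt', content='hi'))
#     tip = imc.commit(message=u'first', author=u'Joe Doe <joe@example.com>')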
 

	
 

	
 
class InMemoryChangesetTestMixin(object):
 
    """
 
    This is a backend-independent test case class; concrete test classes
    should be created with the ``type`` method.

    It is required to set the following attributes on the subclass:

    - ``backend_alias``: alias of the backend used (see ``vcs.BACKENDS``)
    - ``repo_path``: path to the repository that will be created for the
      test set
 
    """
 

	
 
    def get_backend(self):
 
        return vcs.get_backend(self.backend_alias)
 

	
 
    def setUp(self):
 
        Backend = self.get_backend()
 
        self.repo_path = get_new_dir(str(time.time()))
 
        self.repo = Backend(self.repo_path, create=True)
 
        self.imc = self.repo.in_memory_changeset
 
        self.nodes = [
 
            FileNode('foobar', content='Foo & bar'),
 
            FileNode('foobar2', content='Foo & bar, doubled!'),
 
            FileNode('foo bar with spaces', content=''),
 
            FileNode('foo/bar/baz', content='Inside'),
 
            FileNode('foo/bar/file.bin', content='\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00;\x00\x03\x00\xfe\xff\t\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x18\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'),
 
        ]
 

	
 
    def test_add(self):
 
        rev_count = len(self.repo.revisions)
 
        to_add = [FileNode(node.path, content=node.content)
 
            for node in self.nodes]
 
        for node in to_add:
 
            self.imc.add(node)
 
        message = u'Added: %s' % ', '.join((node.path for node in self.nodes))
 
        author = unicode(self.__class__)
 
        changeset = self.imc.commit(message=message, author=author)
 

	
 
        newtip = self.repo.get_changeset()
 
        self.assertEqual(changeset, newtip)
 
        self.assertEqual(rev_count + 1, len(self.repo.revisions))
 
        self.assertEqual(newtip.message, message)
 
        self.assertEqual(newtip.author, author)
 
        self.assertTrue(not any((self.imc.added, self.imc.changed,
 
            self.imc.removed)))
 
        for node in to_add:
 
            self.assertEqual(newtip.get_node(node.path).content, node.content)
 

	
 
    def test_add_in_bulk(self):
 
        rev_count = len(self.repo.revisions)
 
        to_add = [FileNode(node.path, content=node.content)
 
            for node in self.nodes]
 
        self.imc.add(*to_add)
 
        message = u'Added: %s' % ', '.join((node.path for node in self.nodes))
 
        author = unicode(self.__class__)
 
        changeset = self.imc.commit(message=message, author=author)
 

	
 
        newtip = self.repo.get_changeset()
 
        self.assertEqual(changeset, newtip)
 
        self.assertEqual(rev_count + 1, len(self.repo.revisions))
 
        self.assertEqual(newtip.message, message)
 
        self.assertEqual(newtip.author, author)
 
        self.assertTrue(not any((self.imc.added, self.imc.changed,
 
            self.imc.removed)))
 
        for node in to_add:
 
            self.assertEqual(newtip.get_node(node.path).content, node.content)
 

	
 
    def test_add_actually_adds_all_nodes_at_second_commit_too(self):
 
        self.imc.add(FileNode('foo/bar/image.png', content='\0'))
 
        self.imc.add(FileNode('foo/README.txt', content='readme!'))
 
        changeset = self.imc.commit(u'Initial', u'joe.doe@example.com')
 
        self.assertTrue(isinstance(changeset.get_node('foo'), DirNode))
 
        self.assertTrue(isinstance(changeset.get_node('foo/bar'), DirNode))
 
        self.assertEqual(changeset.get_node('foo/bar/image.png').content, '\0')
 
        self.assertEqual(changeset.get_node('foo/README.txt').content, 'readme!')
 

	
 
        # commit some more files again
 
        to_add = [
 
            FileNode('foo/bar/foobaz/bar', content='foo'),
 
            FileNode('foo/bar/another/bar', content='foo'),
 
            FileNode('foo/baz.txt', content='foo'),
 
            FileNode('foobar/foobaz/file', content='foo'),
 
            FileNode('foobar/barbaz', content='foo'),
 
        ]
 
        self.imc.add(*to_add)
 
        changeset = self.imc.commit(u'Another', u'joe.doe@example.com')
 
        self.assertEqual(changeset.get_node('foo/bar/foobaz/bar').content, 'foo')
 
        self.assertEqual(changeset.get_node('foo/bar/another/bar').content, 'foo')
 
        self.assertEqual(changeset.get_node('foo/baz.txt').content, 'foo')
 
        self.assertEqual(changeset.get_node('foobar/foobaz/file').content, 'foo')
 
        self.assertEqual(changeset.get_node('foobar/barbaz').content, 'foo')
 

	
 
    def test_add_non_ascii_files(self):
 
        rev_count = len(self.repo.revisions)
 
        to_add = [
 
            FileNode('żółwik/zwierzątko', content='ćććć'),
 
            FileNode(u'żółwik/zwierzątko_uni', content=u'ćććć'),
 
        ]
 
        for node in to_add:
 
            self.imc.add(node)
 
        message = u'Added: %s' % ', '.join((node.path for node in self.nodes))
 
        author = unicode(self.__class__)
 
        changeset = self.imc.commit(message=message, author=author)
 

	
 
        newtip = self.repo.get_changeset()
 
        self.assertEqual(changeset, newtip)
 
        self.assertEqual(rev_count + 1, len(self.repo.revisions))
 
        self.assertEqual(newtip.message, message)
 
        self.assertEqual(newtip.author, author)
 
        self.assertTrue(not any((self.imc.added, self.imc.changed,
 
            self.imc.removed)))
 
        for node in to_add:
 
            self.assertEqual(newtip.get_node(node.path).content, node.content)
 

	
 
    def test_add_raise_already_added(self):
 
        node = FileNode('foobar', content='baz')
 
        self.imc.add(node)
 
        self.assertRaises(NodeAlreadyAddedError, self.imc.add, node)
 

	
 
    def test_check_integrity_raise_already_exist(self):
 
        node = FileNode('foobar', content='baz')
 
        self.imc.add(node)
 
        self.imc.commit(message=u'Added foobar', author=unicode(self))
 
        self.imc.add(node)
 
        self.assertRaises(NodeAlreadyExistsError, self.imc.commit,
 
            message='new message',
 
            author=str(self))
 

	
 
    def test_change(self):
 
        self.imc.add(FileNode('foo/bar/baz', content='foo'))
 
        self.imc.add(FileNode('foo/fbar', content='foobar'))
 
        tip = self.imc.commit(u'Initial', u'joe.doe@example.com')
 

	
 
        # Change node's content
 
        node = FileNode('foo/bar/baz', content='My **changed** content')
 
        self.imc.change(node)
 
        self.imc.commit(u'Changed %s' % node.path, u'joe.doe@example.com')
 

	
 
        newtip = self.repo.get_changeset()
 
        self.assertNotEqual(tip, newtip)
 
        self.assertNotEqual(tip.id, newtip.id)
 
        self.assertEqual(newtip.get_node('foo/bar/baz').content,
 
                        'My **changed** content')
 

	
 
    def test_change_non_ascii(self):
 
        to_add = [
 
            FileNode('żółwik/zwierzątko', content='ćććć'),
 
            FileNode(u'żółwik/zwierzątko_uni', content=u'ćććć'),
 
        ]
 
        for node in to_add:
 
            self.imc.add(node)
 

	
 
        tip = self.imc.commit(u'Initial', u'joe.doe@example.com')
 

	
 
        # Change node's content
 
        node = FileNode('żółwik/zwierzątko', content='My **changed** content')
 
        self.imc.change(node)
 
        self.imc.commit(u'Changed %s' % safe_unicode(node.path),
 
                        u'joe.doe@example.com')
 

	
 
        node = FileNode(u'żółwik/zwierzątko_uni', content=u'My **changed** content')
 
        self.imc.change(node)
 
        self.imc.commit(u'Changed %s' % safe_unicode(node.path),
 
                        u'joe.doe@example.com')
 

	
 
        newtip = self.repo.get_changeset()
 
        self.assertNotEqual(tip, newtip)
 
        self.assertNotEqual(tip.id, newtip.id)
 

	
 
        self.assertEqual(newtip.get_node('żółwik/zwierzątko').content,
 
                         'My **changed** content')
 
        self.assertEqual(newtip.get_node('żółwik/zwierzątko_uni').content,
 
                         'My **changed** content')
 

	
 
    def test_change_raise_empty_repository(self):
 
        node = FileNode('foobar')
 
        self.assertRaises(EmptyRepositoryError, self.imc.change, node)
 

	
 
    def test_check_integrity_change_raise_node_does_not_exist(self):
 
        node = FileNode('foobar', content='baz')
 
        self.imc.add(node)
 
        self.imc.commit(message=u'Added foobar', author=unicode(self))
 
        node = FileNode('not-foobar', content='')
 
        self.imc.change(node)
 
        self.assertRaises(NodeDoesNotExistError, self.imc.commit,
 
            message='Changed not existing node',
 
            author=str(self))
 

	
 
    def test_change_raise_node_already_changed(self):
 
        node = FileNode('foobar', content='baz')
 
        self.imc.add(node)
 
        self.imc.commit(message=u'Added foobar', author=unicode(self))
 
        node = FileNode('foobar', content='more baz')
 
        self.imc.change(node)
 
        self.assertRaises(NodeAlreadyChangedError, self.imc.change, node)
 

	
 
    def test_check_integrity_change_raise_node_not_changed(self):
 
        self.test_add()  # Performs first commit
 

	
 
        node = FileNode(self.nodes[0].path, content=self.nodes[0].content)
 
        self.imc.change(node)
 
        self.assertRaises(NodeNotChangedError, self.imc.commit,
 
            message=u'Trying to mark node as changed without touching it',
 
            author=unicode(self))
 

	
 
    def test_change_raise_node_already_removed(self):
 
        node = FileNode('foobar', content='baz')
 
        self.imc.add(node)
 
        self.imc.commit(message=u'Added foobar', author=unicode(self))
 
        self.imc.remove(FileNode('foobar'))
 
        self.assertRaises(NodeAlreadyRemovedError, self.imc.change, node)
 

	
 
    def test_remove(self):
 
        self.test_add()  # Performs first commit
 

	
 
        tip = self.repo.get_changeset()
 
        node = self.nodes[0]
 
        self.assertEqual(node.content, tip.get_node(node.path).content)
 
        self.imc.remove(node)
 
        self.imc.commit(message=u'Removed %s' % node.path, author=unicode(self))
 

	
 
        newtip = self.repo.get_changeset()
 
        self.assertNotEqual(tip, newtip)
 
        self.assertNotEqual(tip.id, newtip.id)
 
        self.assertRaises(NodeDoesNotExistError, newtip.get_node, node.path)
 

	
 
    def test_remove_last_file_from_directory(self):
 
        node = FileNode('omg/qwe/foo/bar', content='foobar')
 
        self.imc.add(node)
 
        self.imc.commit(u'added', u'joe doe')
 

	
 
        self.imc.remove(node)
 
        tip = self.imc.commit(u'removed', u'joe doe')
 
        self.assertRaises(NodeDoesNotExistError, tip.get_node, 'omg/qwe/foo/bar')
 

	
 
    def test_remove_raise_node_does_not_exist(self):
 
        self.imc.remove(self.nodes[0])
 
        self.assertRaises(NodeDoesNotExistError, self.imc.commit,
 
            message=u'Trying to remove a node from an empty repository',
 
            author=str(self))
 

	
 
    def test_check_integrity_remove_raise_node_does_not_exist(self):
 
        self.test_add()  # Performs first commit
 

	
 
        node = FileNode('no-such-file')
 
        self.imc.remove(node)
 
        self.assertRaises(NodeDoesNotExistError, self.imc.commit,
 
            message=u'Trying to remove a non-existent node',
 
            author=unicode(self))
 

	
 
    def test_remove_raise_node_already_removed(self):
 
        self.test_add()  # Performs first commit
 

	
 
        node = FileNode(self.nodes[0].path)
 
        self.imc.remove(node)
 
        self.assertRaises(NodeAlreadyRemovedError, self.imc.remove, node)
 

	
 
    def test_remove_raise_node_already_changed(self):
 
        self.test_add()  # Performs first commit
 

	
 
        node = FileNode(self.nodes[0].path, content='Bending time')
 
        self.imc.change(node)
 
        self.assertRaises(NodeAlreadyChangedError, self.imc.remove, node)
 

	
 
    def test_reset(self):
 
        self.imc.add(FileNode('foo', content='bar'))
 
        #self.imc.change(FileNode('baz', content='new'))
 
        #self.imc.remove(FileNode('qwe'))
 
        self.imc.reset()
 
        self.assertFalse(any((self.imc.added, self.imc.changed,
 
            self.imc.removed)))
 

	
 
    def test_multiple_commits(self):
 
        N = 3  # number of commits to perform
 
        last = None
 
        for x in xrange(N):
 
            fname = 'file%s' % str(x).rjust(5, '0')
 
            content = 'foobar\n' * x
 
            node = FileNode(fname, content=content)
 
            self.imc.add(node)
 
            commit = self.imc.commit(u"Commit no. %s" % (x + 1), author=u'vcs')
 
            self.assertNotEqual(last, commit)
 
            last = commit
 

	
 
        # Check commit number for same repo
 
        self.assertEqual(len(self.repo.revisions), N)
 

	
 
        # Check commit number for recreated repo
 
        backend = self.get_backend()
 
        repo = backend(self.repo_path)
 
        self.assertEqual(len(repo.revisions), N)
 

	
 
    def test_date_attr(self):
 
        node = FileNode('foobar.txt', content='Foobared!')
 
        self.imc.add(node)
 
        date = datetime.datetime(1985, 1, 30, 1, 45)
 
        commit = self.imc.commit(u"Committed at time when I was born ;-)",
 
            author=u'lb', date=date)
 

	
 
        self.assertEqual(commit.date, date)
 

	
 

	
 
class BackendBaseTestCase(unittest.TestCase):
 
    """
 
    Base test class for tests which require a repository.
 
    """
 
    backend_alias = 'hg'
 
    commits = [
 
        {
 
            'message': 'Initial commit',
 
            'author': 'Joe Doe <joe.doe@example.com>',
 
            'date': datetime.datetime(2010, 1, 1, 20),
 
            'added': [
 
                FileNode('foobar', content='Foobar'),
 
                FileNode('foobar2', content='Foobar II'),
 
                FileNode('foo/bar/baz', content='baz here!'),
 
            ],
 
        },
 
    ]
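    # Note for readers: setUp() below replays this declarative spec -- each
    # dict becomes one commit, with its 'added'/'changed'/'removed' nodes
    # staged on the in-memory changeset before committing with the given
    # message, author and date.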
 

	
 
    def get_backend(self):
 
        return vcs.get_backend(self.backend_alias)
 

	
 
    def get_commits(self):
 
        """
 
        Returns the list of commits which builds the repository for each test.
 
        """
 
        if hasattr(self, 'commits'):
 
            return self.commits
 

	
 
    def get_new_repo_path(self):
 
        """
 
        Returns the directory for a newly created repository.
 
        """
 
        backend = self.get_backend()
 
        key = '%s-%s' % (self.backend_alias, str(time.time()))
 
        repo_path = get_new_dir(key)
 
        return repo_path
 

	
 
    def setUp(self):
 
        Backend = self.get_backend()
 
        self.backend_class = Backend
 
        self.repo_path = self.get_new_repo_path()
 
        self.repo = Backend(self.repo_path, create=True)
 
        self.imc = self.repo.in_memory_changeset
 

	
 
        for commit in self.get_commits():
 
            for node in commit.get('added', []):
 
                self.imc.add(FileNode(node.path, content=node.content))
 
            for node in commit.get('changed', []):
 
                self.imc.change(FileNode(node.path, content=node.content))
 
            for node in commit.get('removed', []):
 
                self.imc.remove(FileNode(node.path))
 
            self.imc.commit(message=unicode(commit['message']),
 
                            author=unicode(commit['author']),
 
                            date=commit['date'])
 

	
 
        self.tip = self.repo.get_changeset()
 

	
 

	
 
# For each backend create test case class
 
for alias in SCM_TESTS:
 
    attrs = {
 
        'backend_alias': alias,
 
    }
 
    cls_name = ''.join(('%s in memory changeset test' % alias).title().split())
 
    bases = (InMemoryChangesetTestMixin, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
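# A minimal sketch of the generation idiom used above (the class name here is
# illustrative only): type() builds one concrete TestCase per backend, and
# binding it in globals() lets unittest discovery pick it up.
#
#     DemoHgTest = type('DemoHgTest',
#                       (InMemoryChangesetTestMixin, unittest.TestCase),
#                       {'backend_alias': 'hg'})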
 

	
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_nodes.py
Show inline comments
 
from __future__ import with_statement
 

	
 
import stat
 
from kallithea.lib.vcs.nodes import DirNode
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.lib.vcs.nodes import Node
 
from kallithea.lib.vcs.nodes import NodeError
 
from kallithea.lib.vcs.nodes import NodeKind
 
from kallithea.lib.vcs.utils.compat import unittest
 

	
 

	
 
class NodeBasicTest(unittest.TestCase):
 

	
 
    def test_init(self):
 
        """
 
        Cannot initialize Node objects with a path that has a leading slash.
 
        """
 
        wrong_paths = (
 
            '/foo',
 
            '/foo/bar'
 
        )
 
        for path in wrong_paths:
 
            self.assertRaises(NodeError, Node, path, NodeKind.FILE)
 

	
 
        wrong_paths = (
 
            '/foo/',
 
            '/foo/bar/'
 
        )
 
        for path in wrong_paths:
 
            self.assertRaises(NodeError, Node, path, NodeKind.DIR)
 

	
 
    def test_name(self):
 
        node = Node('', NodeKind.DIR)
 
        self.assertEqual(node.name, '')
 

	
 
        node = Node('path', NodeKind.FILE)
 
        self.assertEqual(node.name, 'path')
 

	
 
        node = Node('path/', NodeKind.DIR)
 
        self.assertEqual(node.name, 'path')
 

	
 
        node = Node('some/path', NodeKind.FILE)
 
        self.assertEqual(node.name, 'path')
 

	
 
        node = Node('some/path/', NodeKind.DIR)
 
        self.assertEqual(node.name, 'path')
 

	
 
    def test_root_node(self):
 
        self.assertRaises(NodeError, Node, '', NodeKind.FILE)
 

	
 
    def test_kind_setter(self):
 
        node = Node('', NodeKind.DIR)
 
        self.assertRaises(NodeError, setattr, node, 'kind', NodeKind.FILE)
 

	
 
    def _test_parent_path(self, node_path, expected_parent_path):
 
        """
 
        Tests that a node's parent path is properly computed.
 
        """
 
        node = Node(node_path, NodeKind.DIR)
 
        parent_path = node.get_parent_path()
 
        self.assertTrue(parent_path.endswith('/') or \
 
            node.is_root() and parent_path == '')
 
        self.assertEqual(parent_path, expected_parent_path,
 
            "Node's path is %r and parent path is %r but should be %r"
 
            % (node.path, parent_path, expected_parent_path))
 

	
 
    def test_parent_path(self):
 
        test_paths = (
 
            # (node_path, expected_parent_path)
 
            ('', ''),
 
            ('some/path/', 'some/'),
 
            ('some/longer/path/', 'some/longer/'),
 
        )
 
        for node_path, expected_parent_path in test_paths:
 
            self._test_parent_path(node_path, expected_parent_path)
 

	
 
    '''
 
    def _test_trailing_slash(self, path):
 
        if not path.endswith('/'):
 
            self.fail("Trailing slash tests needs paths to end with slash")
 
        for kind in NodeKind.FILE, NodeKind.DIR:
 
            self.assertRaises(NodeError, Node, path=path, kind=kind)
 

	
 
    def test_trailing_slash(self):
 
        for path in ('/', 'foo/', 'foo/bar/', 'foo/bar/biz/'):
 
            self._test_trailing_slash(path)
 
    '''
 

	
 
    def test_is_file(self):
 
        node = Node('any', NodeKind.FILE)
 
        self.assertTrue(node.is_file())
 

	
 
        node = FileNode('any')
 
        self.assertTrue(node.is_file())
 
        self.assertRaises(AttributeError, getattr, node, 'nodes')
 

	
 
    def test_is_dir(self):
 
        node = Node('any_dir', NodeKind.DIR)
 
        self.assertTrue(node.is_dir())
 

	
 
        node = DirNode('any_dir')
 

	
 
        self.assertTrue(node.is_dir())
 
        self.assertRaises(NodeError, getattr, node, 'content')
 

	
 
    def test_dir_node_iter(self):
 
        nodes = [
 
            DirNode('docs'),
 
            DirNode('tests'),
 
            FileNode('bar'),
 
            FileNode('foo'),
 
            FileNode('readme.txt'),
 
            FileNode('setup.py'),
 
        ]
 
        dirnode = DirNode('', nodes=nodes)
 
        for node in dirnode:
 
            self.assertEqual(node, dirnode.get_node(node.path))
 

	
 
    def test_node_state(self):
 
        """
 
        Without a link to a changeset, nodes should raise NodeError.
 
        """
 
        node = FileNode('anything')
 
        self.assertRaises(NodeError, getattr, node, 'state')
 
        node = DirNode('anything')
 
        self.assertRaises(NodeError, getattr, node, 'state')
 

	
 
    def test_file_node_stat(self):
 
        node = FileNode('foobar', 'empty... almost')
 
        mode = node.mode  # default should be 0100644
 
        self.assertTrue(mode & stat.S_IRUSR)
 
        self.assertTrue(mode & stat.S_IWUSR)
 
        self.assertTrue(mode & stat.S_IRGRP)
 
        self.assertTrue(mode & stat.S_IROTH)
 
        self.assertFalse(mode & stat.S_IWGRP)
 
        self.assertFalse(mode & stat.S_IWOTH)
 
        self.assertFalse(mode & stat.S_IXUSR)
 
        self.assertFalse(mode & stat.S_IXGRP)
 
        self.assertFalse(mode & stat.S_IXOTH)
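        # For reference: the default mode 0100644 is stat.S_IFREG | 0644,
        # i.e. a regular file that is owner-writable and world-readable with
        # no execute bits, which is exactly what the assertions above check.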
 

	
 
    def test_file_node_is_executable(self):
 
        node = FileNode('foobar', 'empty... almost', mode=0100755)
 
        self.assertTrue(node.is_executable)
 

	
 
        node = FileNode('foobar', 'empty... almost', mode=0100500)
 
        self.assertTrue(node.is_executable)
 

	
 
        node = FileNode('foobar', 'empty... almost', mode=0100644)
 
        self.assertFalse(node.is_executable)
 

	
 
    def test_mimetype(self):
 
        py_node = FileNode('test.py')
 
        tar_node = FileNode('test.tar.gz')
 

	
 
        ext = 'CustomExtension'
 

	
 
        my_node2 = FileNode('myfile2')
 
        my_node2._mimetype = [ext]
 

	
 
        my_node3 = FileNode('myfile3')
 
        my_node3._mimetype = [ext, ext]
 

	
 
        self.assertEqual(py_node.mimetype, 'text/x-python')
 
        self.assertEqual(py_node.get_mimetype(), ('text/x-python', None))
 

	
 
        self.assertEqual(tar_node.mimetype, 'application/x-tar')
 
        self.assertEqual(tar_node.get_mimetype(), ('application/x-tar', 'gzip'))
 

	
 
        self.assertRaises(NodeError, my_node2.get_mimetype)
 

	
 
        self.assertEqual(my_node3.mimetype, ext)
 
        self.assertEqual(my_node3.get_mimetype(), [ext, ext])
 

	
 
class NodeContentTest(unittest.TestCase):
 

	
 
    def test_if_binary(self):
 
        data = """\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f??a\x00\x00\x00\x04gAMA\x00\x00\xaf?7\x05\x8a?\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq?e<\x00\x00\x025IDAT8?\xa5\x93?K\x94Q\x14\x87\x9f\xf7?Q\x1bs4?\x03\x9a\xa8?B\x02\x8b$\x10[U;i\x13?6h?&h[?"\x14j?\xa2M\x7fB\x14F\x9aQ?&\x842?\x0b\x89"\x82??!?\x9c!\x9c2l??{N\x8bW\x9dY\xb4\t/\x1c?=\x9b?}????\xa9*;9!?\x83\x91?[?\\v*?D\x04\'`EpNp\xa2X\'U?pVq"Sw.\x1e?\x08\x01D?jw????\xbc??7{|\x9b?\x89$\x01??W@\x15\x9c\x05q`Lt/\x97?\x94\xa1d?\x18~?\x18?\x18W[%\xb0?\x83??\x14\x88\x8dB?\xa6H\tL\tl\x19>/\x01`\xac\xabx?\x9cl\nx\xb0\x98\x07\x95\x88D$"q[\x19?d\x00(o\n\xa0??\x7f\xb9\xa4?\x1bF\x1f\x8e\xac\xa8?j??eUU}?.?\x9f\x8cE??x\x94??\r\xbdtoJU5"0N\x10U?\x00??V\t\x02\x9f\x81?U?\x00\x9eM\xae2?r\x9b7\x83\x82\x8aP3????.?&"?\xb7ZP \x0c<?O\xa5\t}\xb8?\x99\xa6?\x87?\x1di|/\xa0??0\xbe\x1fp?d&\x1a\xad\x95\x8a\x07?\t*\x10??b:?d?.\x13C\x8a?\x12\xbe\xbf\x8e?{???\x08?\x80\xa7\x13+d\x13>J?\x80\x15T\x95\x9a\x00??S\x8c\r?\xa1\x03\x07?\x96\x9b\xa7\xab=E??\xa4\xb3?\x19q??B\x91=\x8d??k?J\x0bV"??\xf7x?\xa1\x00?\\.\x87\x87???\x02F@D\x99],??\x10#?X\xb7=\xb9\x10?Z\x1by???cI??\x1ag?\x92\xbc?T?t[\x92\x81?<_\x17~\x92\x88?H%?\x10Q\x02\x9f\n\x81qQ\x0bm?\x1bX?\xb1AK\xa6\x9e\xb9?u\xb2?1\xbe|/\x92M@\xa2!F?\xa9>"\r<DT?>\x92\x8e?>\x9a9Qv\x127?a\xac?Y?8?:??]X???9\x80\xb7?u?\x0b#BZ\x8d=\x1d?p\x00\x00\x00\x00IEND\xaeB`\x82"""
 
        filenode = FileNode('calendar.png', content=data)
 
        self.assertTrue(filenode.is_binary)
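        # Note: the PNG data above contains \x00 bytes; a null-byte check is
        # the usual heuristic behind an is_binary flag, though that
        # implementation detail is an assumption and not asserted here.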
 

	
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_repository.py
Show inline comments
 
from __future__ import with_statement
 
import datetime
 
from kallithea.tests.vcs.base import _BackendTestMixin
 
from kallithea.tests.vcs.conf import SCM_TESTS
 
from kallithea.tests.vcs.conf import TEST_USER_CONFIG_FILE
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.lib.vcs.utils.compat import unittest
 
from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError
 

	
 

	
 
class RepositoryBaseTest(_BackendTestMixin):
 
    recreate_repo_per_test = False
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        return super(RepositoryBaseTest, cls)._get_commits()[:1]
 

	
 
    def test_get_config_value(self):
 
        self.assertEqual(self.repo.get_config_value('universal', 'foo',
 
            TEST_USER_CONFIG_FILE), 'bar')
 

	
 
    def test_get_config_value_defaults_to_None(self):
 
        self.assertEqual(self.repo.get_config_value('universal', 'nonexist',
 
            TEST_USER_CONFIG_FILE), None)
 

	
 
    def test_get_user_name(self):
 
        self.assertEqual(self.repo.get_user_name(TEST_USER_CONFIG_FILE),
 
            'Foo Bar')
 

	
 
    def test_get_user_email(self):
 
        self.assertEqual(self.repo.get_user_email(TEST_USER_CONFIG_FILE),
 
            'foo.bar@example.com')
 

	
 
    def test_repo_equality(self):
 
        self.assertTrue(self.repo == self.repo)
 

	
 
    def test_repo_equality_broken_object(self):
 
        import copy
 
        _repo = copy.copy(self.repo)
 
        delattr(_repo, 'path')
 
        self.assertTrue(self.repo != _repo)
 

	
 
    def test_repo_equality_other_object(self):
 
        class dummy(object):
 
            path = self.repo.path
 
        self.assertTrue(self.repo != dummy())
 

	
 

	
 
class RepositoryGetDiffTest(_BackendTestMixin):
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        commits = [
 
            {
 
                'message': 'Initial commit',
 
                'author': 'Joe Doe <joe.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 20),
 
                'added': [
 
                    FileNode('foobar', content='foobar'),
 
                    FileNode('foobar2', content='foobar2'),
 
                ],
 
            },
 
            {
 
                'message': 'Changed foobar, added foobar3',
 
                'author': 'Jane Doe <jane.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 21),
 
                'added': [
 
                    FileNode('foobar3', content='foobar3'),
 
                ],
 
                'changed': [
 
                    FileNode('foobar', 'FOOBAR'),
 
                ],
 
            },
 
            {
 
                'message': 'Removed foobar, changed foobar3',
 
                'author': 'Jane Doe <jane.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 22),
 
                'changed': [
 
                    FileNode('foobar3', content='FOOBAR\nFOOBAR\nFOOBAR\n'),
 
                ],
 
                'removed': [FileNode('foobar')],
 
            },
 
        ]
 
        return commits
 

	
 
    def test_raise_for_wrong(self):
 
        with self.assertRaises(ChangesetDoesNotExistError):
 
            self.repo.get_diff('a' * 40, 'b' * 40)
 

	
 

	
 
class GitRepositoryGetDiffTest(RepositoryGetDiffTest, unittest.TestCase):
 
    backend_alias = 'git'
 

	
 
    def test_initial_commit_diff(self):
 
        initial_rev = self.repo.revisions[0]
 
        self.assertEqual(self.repo.get_diff(self.repo.EMPTY_CHANGESET, initial_rev), '''diff --git a/foobar b/foobar
 
new file mode 100644
 
index 0000000000000000000000000000000000000000..f6ea0495187600e7b2288c8ac19c5886383a4632
 
--- /dev/null
 
+++ b/foobar
 
@@ -0,0 +1 @@
 
+foobar
 
\ No newline at end of file
 
diff --git a/foobar2 b/foobar2
 
new file mode 100644
 
index 0000000000000000000000000000000000000000..e8c9d6b98e3dce993a464935e1a53f50b56a3783
 
--- /dev/null
 
+++ b/foobar2
 
@@ -0,0 +1 @@
 
+foobar2
 
\ No newline at end of file
 
''')
 

	
 
    def test_second_changeset_diff(self):
 
        revs = self.repo.revisions
 
        self.assertEqual(self.repo.get_diff(revs[0], revs[1]), '''diff --git a/foobar b/foobar
 
index f6ea0495187600e7b2288c8ac19c5886383a4632..389865bb681b358c9b102d79abd8d5f941e96551 100644
 
--- a/foobar
 
+++ b/foobar
 
@@ -1 +1 @@
 
-foobar
 
\ No newline at end of file
 
+FOOBAR
 
\ No newline at end of file
 
diff --git a/foobar3 b/foobar3
 
new file mode 100644
 
index 0000000000000000000000000000000000000000..c11c37d41d33fb47741cff93fa5f9d798c1535b0
 
--- /dev/null
 
+++ b/foobar3
 
@@ -0,0 +1 @@
 
+foobar3
 
\ No newline at end of file
 
''')
 

	
 
    def test_third_changeset_diff(self):
 
        revs = self.repo.revisions
 
        self.assertEqual(self.repo.get_diff(revs[1], revs[2]), '''diff --git a/foobar b/foobar
 
deleted file mode 100644
 
index 389865bb681b358c9b102d79abd8d5f941e96551..0000000000000000000000000000000000000000
 
--- a/foobar
 
+++ /dev/null
 
@@ -1 +0,0 @@
 
-FOOBAR
 
\ No newline at end of file
 
diff --git a/foobar3 b/foobar3
 
index c11c37d41d33fb47741cff93fa5f9d798c1535b0..f9324477362684ff692aaf5b9a81e01b9e9a671c 100644
 
--- a/foobar3
 
+++ b/foobar3
 
@@ -1 +1,3 @@
 
-foobar3
 
\ No newline at end of file
 
+FOOBAR
 
+FOOBAR
 
+FOOBAR
 
''')
 

	
 

	
 
class HgRepositoryGetDiffTest(RepositoryGetDiffTest, unittest.TestCase):
 
    backend_alias = 'hg'
 

	
 
    def test_initial_commit_diff(self):
 
        initial_rev = self.repo.revisions[0]
 
        self.assertEqual(self.repo.get_diff(self.repo.EMPTY_CHANGESET, initial_rev), '''diff --git a/foobar b/foobar
 
new file mode 100644
 
--- /dev/null
 
+++ b/foobar
 
@@ -0,0 +1,1 @@
 
+foobar
 
\ No newline at end of file
 
diff --git a/foobar2 b/foobar2
 
new file mode 100644
 
--- /dev/null
 
+++ b/foobar2
 
@@ -0,0 +1,1 @@
 
+foobar2
 
\ No newline at end of file
 
''')
 

	
 
    def test_second_changeset_diff(self):
 
        revs = self.repo.revisions
 
        self.assertEqual(self.repo.get_diff(revs[0], revs[1]), '''diff --git a/foobar b/foobar
 
--- a/foobar
 
+++ b/foobar
 
@@ -1,1 +1,1 @@
 
-foobar
 
\ No newline at end of file
 
+FOOBAR
 
\ No newline at end of file
 
diff --git a/foobar3 b/foobar3
 
new file mode 100644
 
--- /dev/null
 
+++ b/foobar3
 
@@ -0,0 +1,1 @@
 
+foobar3
 
\ No newline at end of file
 
''')
 

	
 
    def test_third_changeset_diff(self):
 
        revs = self.repo.revisions
 
        self.assertEqual(self.repo.get_diff(revs[1], revs[2]), '''diff --git a/foobar b/foobar
 
deleted file mode 100644
 
--- a/foobar
 
+++ /dev/null
 
@@ -1,1 +0,0 @@
 
-FOOBAR
 
\ No newline at end of file
 
diff --git a/foobar3 b/foobar3
 
--- a/foobar3
 
+++ b/foobar3
 
@@ -1,1 +1,3 @@
 
-foobar3
 
\ No newline at end of file
 
+FOOBAR
 
+FOOBAR
 
+FOOBAR
 
''')
 

	
 

	
 
# For each backend create test case class
 
for alias in SCM_TESTS:
 
    attrs = {
 
        'backend_alias': alias,
 
    }
 
    cls_name = alias.capitalize() + RepositoryBaseTest.__name__
 
    bases = (RepositoryBaseTest, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
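# e.g. the alias 'hg' yields a generated class named 'HgRepositoryBaseTest'.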
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_tags.py
Show inline comments
 
from __future__ import with_statement
 

	
 
from kallithea.tests.vcs.base import _BackendTestMixin
 
from kallithea.tests.vcs.conf import SCM_TESTS
 
from kallithea.lib.vcs.exceptions import TagAlreadyExistError
 
from kallithea.lib.vcs.exceptions import TagDoesNotExistError
 
from kallithea.lib.vcs.utils.compat import unittest
 

	
 

	
 
class TagsTestCaseMixin(_BackendTestMixin):
 

	
 
    def test_new_tag(self):
 
        tip = self.repo.get_changeset()
 
        tagsize = len(self.repo.tags)
 
        tag = self.repo.tag('last-commit', 'joe', tip.raw_id)
 

	
 
        self.assertEqual(len(self.repo.tags), tagsize + 1)
 
        for top, dirs, files in tip.walk():
 
            self.assertEqual(top, tag.get_node(top.path))
 

	
 
    def test_tag_already_exist(self):
 
        tip = self.repo.get_changeset()
 
        self.repo.tag('last-commit', 'joe', tip.raw_id)
 

	
 
        self.assertRaises(TagAlreadyExistError,
 
            self.repo.tag, 'last-commit', 'joe', tip.raw_id)
 

	
 
        chset = self.repo.get_changeset(0)
 
        self.assertRaises(TagAlreadyExistError,
 
            self.repo.tag, 'last-commit', 'jane', chset.raw_id)
 

	
 
    def test_remove_tag(self):
 
        tip = self.repo.get_changeset()
 
        self.repo.tag('last-commit', 'joe', tip.raw_id)
 
        tagsize = len(self.repo.tags)
 

	
 
        self.repo.remove_tag('last-commit', user='evil joe')
 
        self.assertEqual(len(self.repo.tags), tagsize - 1)
 

	
 
    def test_remove_tag_which_does_not_exist(self):
 
        self.assertRaises(TagDoesNotExistError,
 
            self.repo.remove_tag, 'last-commit', user='evil joe')
 

	
 
    def test_name_with_slash(self):
 
        self.repo.tag('19/10/11', 'joe')
 
        self.assertTrue('19/10/11' in self.repo.tags)
 
        self.repo.tag('11', 'joe')
 
        self.assertTrue('11' in self.repo.tags)
 

	
 
# For each backend create test case class
 
for alias in SCM_TESTS:
 
    attrs = {
 
        'backend_alias': alias,
 
    }
 
    cls_name = ''.join(('%s tags test' % alias).title().split())
 
    bases = (TagsTestCaseMixin, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
 

	
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_utils.py
Show inline comments
 
from __future__ import with_statement
 

	
 
import os
 
import mock
 
import time
 
import shutil
 
import tempfile
 
import datetime
 
from kallithea.lib.vcs.utils.compat import unittest
 
from kallithea.lib.vcs.utils.paths import get_dirs_for_path
 
from kallithea.lib.vcs.utils.helpers import get_dict_for_attrs
 
from kallithea.lib.vcs.utils.helpers import get_scm
 
from kallithea.lib.vcs.utils.helpers import get_scms_for_path
 
from kallithea.lib.vcs.utils.helpers import get_total_seconds
 
from kallithea.lib.vcs.utils.helpers import parse_changesets
 
from kallithea.lib.vcs.utils.helpers import parse_datetime
 
from kallithea.lib.vcs.utils import author_email, author_name
 
from kallithea.lib.vcs.utils.paths import get_user_home
 
from kallithea.lib.vcs.exceptions import VCSError
 

	
 
from kallithea.tests.vcs.conf import TEST_HG_REPO, TEST_GIT_REPO, TEST_TMP_PATH
 

	
 

	
 
class PathsTest(unittest.TestCase):
 

	
 
    def _test_get_dirs_for_path(self, path, expected):
 
        """
 
        Tests that get_dirs_for_path returns the expected result.
 
        """
 
        expected = sorted(expected)
 
        result = sorted(get_dirs_for_path(path))
 
        self.assertEqual(result, expected,
 
            msg="%s != %s which was expected result for path %s"
 
            % (result, expected, path))
 

	
 
    def test_get_dirs_for_path(self):
 
        paths_and_results = (
 
            ('foo/bar/baz/file', ['foo', 'foo/bar', 'foo/bar/baz']),
 
            ('foo/bar/', ['foo', 'foo/bar']),
 
            ('foo/bar', ['foo']),
 
        )
 
        for path, expected in paths_and_results:
 
            self._test_get_dirs_for_path(path, expected)
 

	
 

	
 
    def test_get_scm(self):
 
        self.assertEqual(('hg', TEST_HG_REPO), get_scm(TEST_HG_REPO))
 
        self.assertEqual(('git', TEST_GIT_REPO), get_scm(TEST_GIT_REPO))
 

	
 
    def test_get_two_scms_for_path(self):
 
        multialias_repo_path = os.path.join(TEST_TMP_PATH, 'hg-git-repo-2')
 
        if os.path.isdir(multialias_repo_path):
 
            shutil.rmtree(multialias_repo_path)
 

	
 
        os.mkdir(multialias_repo_path)
 

	
 
        self.assertRaises(VCSError, get_scm, multialias_repo_path)
 

	
 
    def test_get_scm_error_path(self):
 
        self.assertRaises(VCSError, get_scm, 'err')
 

	
 
    def test_get_scms_for_path(self):
 
        dirpath = tempfile.gettempdir()
 
        new = os.path.join(dirpath, 'vcs-scms-for-path-%s' % time.time())
 
        os.mkdir(new)
 
        self.assertEqual(get_scms_for_path(new), [])
 

	
 
        os.mkdir(os.path.join(new, '.tux'))
 
        self.assertEqual(get_scms_for_path(new), [])
 

	
 
        os.mkdir(os.path.join(new, '.git'))
 
        self.assertEqual(set(get_scms_for_path(new)), set(['git']))
 

	
 
        os.mkdir(os.path.join(new, '.hg'))
 
        self.assertEqual(set(get_scms_for_path(new)), set(['git', 'hg']))
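        # Detection is driven by the SCM control directory: '.git' and '.hg'
        # are recognized above, while an unknown dot-directory like '.tux'
        # contributes nothing.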
 

	
 

	
 
class TestParseChangesets(unittest.TestCase):
 

	
 
    def test_main_is_returned_correctly(self):
 
        self.assertEqual(parse_changesets('123456'), {
 
            'start': None,
 
            'main': '123456',
 
            'end': None,
 
        })
 

	
 
    def test_start_is_returned_correctly(self):
 
        self.assertEqual(parse_changesets('aaabbb..'), {
 
            'start': 'aaabbb',
 
            'main': None,
 
            'end': None,
 
        })
 

	
 
    def test_end_is_returned_correctly(self):
 
        self.assertEqual(parse_changesets('..cccddd'), {
 
            'start': None,
 
            'main': None,
 
            'end': 'cccddd',
 
        })
 

	
 
    def test_that_two_or_three_dots_are_allowed(self):
 
        text1 = 'a..b'
 
        text2 = 'a...b'
 
        self.assertEqual(parse_changesets(text1), parse_changesets(text2))
 

	
 
    def test_that_input_is_stripped_first(self):
 
        text1 = 'a..bb'
 
        text2 = '  a..bb\t\n\t '
 
        self.assertEqual(parse_changesets(text1), parse_changesets(text2))
 

	
 
    def test_that_exception_is_raised(self):
 
        text = '123456.789012' # single dot is not recognized
 
        with self.assertRaises(ValueError):
 
            parse_changesets(text)
 

	
 
    def test_non_alphanumeric_raises_exception(self):
 
        with self.assertRaises(ValueError):
 
            parse_changesets('aaa@bbb')
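# Inferred usage sketch (a full range is not asserted above, so this is an
# assumption): parse_changesets('aaa..bbb') should yield
# {'start': 'aaa', 'main': None, 'end': 'bbb'}.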
 

	
 

	
 
class TestParseDatetime(unittest.TestCase):
 

	
 
    def test_datetime_text(self):
 
        self.assertEqual(parse_datetime('2010-04-07 21:29:41'),
 
            datetime.datetime(2010, 4, 7, 21, 29, 41))
 

	
 
    def test_no_seconds(self):
 
        self.assertEqual(parse_datetime('2010-04-07 21:29'),
 
            datetime.datetime(2010, 4, 7, 21, 29))
 

	
 
    def test_date_only(self):
 
        self.assertEqual(parse_datetime('2010-04-07'),
 
            datetime.datetime(2010, 4, 7))
 

	
 
    def test_another_format(self):
 
        self.assertEqual(parse_datetime('04/07/10 21:29:41'),
 
            datetime.datetime(2010, 4, 7, 21, 29, 41))
 

	
 
    def test_now(self):
 
        self.assertTrue(parse_datetime('now') - datetime.datetime.now() <
 
            datetime.timedelta(seconds=1))
 

	
 
    def test_today(self):
 
        today = datetime.date.today()
 
        self.assertEqual(parse_datetime('today'),
 
            datetime.datetime(*today.timetuple()[:3]))
 

	
 
    def test_yesterday(self):
 
        yesterday = datetime.date.today() - datetime.timedelta(days=1)
 
        self.assertEqual(parse_datetime('yesterday'),
 
            datetime.datetime(*yesterday.timetuple()[:3]))
 

	
 
    def test_tomorrow(self):
 
        tomorrow = datetime.date.today() + datetime.timedelta(days=1)
 
        args = tomorrow.timetuple()[:3] + (23, 59, 59)
 
        self.assertEqual(parse_datetime('tomorrow'), datetime.datetime(*args))
 

	
 
    def test_days(self):
 
        timestamp = datetime.datetime.today() - datetime.timedelta(days=3)
 
        args = timestamp.timetuple()[:3] + (0, 0, 0, 0)
 
        expected = datetime.datetime(*args)
 
        self.assertEqual(parse_datetime('3d'), expected)
 
        self.assertEqual(parse_datetime('3 d'), expected)
 
        self.assertEqual(parse_datetime('3 day'), expected)
 
        self.assertEqual(parse_datetime('3 days'), expected)
 

	
 
    def test_weeks(self):
 
        timestamp = datetime.datetime.today() - datetime.timedelta(days=3 * 7)
 
        args = timestamp.timetuple()[:3] + (0, 0, 0, 0)
 
        expected = datetime.datetime(*args)
 
        self.assertEqual(parse_datetime('3w'), expected)
 
        self.assertEqual(parse_datetime('3 w'), expected)
 
        self.assertEqual(parse_datetime('3 week'), expected)
 
        self.assertEqual(parse_datetime('3 weeks'), expected)
 

	
 
    def test_mixed(self):
 
        timestamp = datetime.datetime.today() - datetime.timedelta(days=2 * 7 + 3)
 
        args = timestamp.timetuple()[:3] + (0, 0, 0, 0)
 
        expected = datetime.datetime(*args)
 
        self.assertEqual(parse_datetime('2w3d'), expected)
 
        self.assertEqual(parse_datetime('2w 3d'), expected)
 
        self.assertEqual(parse_datetime('2w 3 days'), expected)
 
        self.assertEqual(parse_datetime('2 weeks 3 days'), expected)
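    # All the relative formats above resolve to midnight of the computed day;
    # note the (0, 0, 0, 0) time component spliced into each expected value.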
 

	
 

	
 
class TestAuthorExtractors(unittest.TestCase):
 
    TEST_AUTHORS = [("Username Last'o'Name <username@python-works.com>",
 
                    ("Username Last'o'Name", "username@python-works.com")),
 
                  ("Username Last'o'Name Spaces < username@python-works.com >",
 
                    ("Username Last'o'Name Spaces", "username@python-works.com")),
 
                  ("Username Last'o'Name <username.lastname@python-works.com>",
 
                    ("Username Last'o'Name", "username.lastname@python-works.com")),
 
                  ('mrf RFC_SPEC <username+lastname@python-works.com>',
 
                    ('mrf RFC_SPEC', 'username+lastname@python-works.com')),
 
                  ('username <user@email.com>',
 
                    ('username', 'user@email.com')),
 
                  ('username <user@email.com',
 
                   ('username', 'user@email.com')),
 
                  ('broken missing@email.com',
 
                   ('broken', 'missing@email.com')),
 
                  ('<justemail@mail.com>',
 
                   ('', 'justemail@mail.com')),
 
                  ('justname',
 
                   ('justname', '')),
 
                  ('Mr Double Name withemail@email.com ',
 
                   ('Mr Double Name', 'withemail@email.com')),
 
                  ]
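    # As the cases above illustrate, the extractors are deliberately lenient:
    # unclosed brackets, bare addresses and trailing whitespace still parse.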
 

	
 
    def test_author_email(self):
 

	
 
        for test_str, result in self.TEST_AUTHORS:
 
            self.assertEqual(result[1], author_email(test_str))
 

	
 

	
 
    def test_author_name(self):
 

	
 
        for test_str, result in self.TEST_AUTHORS:
 
            self.assertEqual(result[0], author_name(test_str))
 

	
 

	
 
class TestGetDictForAttrs(unittest.TestCase):
 

	
 
    def test_returned_dict_has_expected_attrs(self):
 
        obj = mock.Mock()
 
        obj.NOT_INCLUDED = 'this key/value should not be included'
 
        obj.CONST = True
 
        obj.foo = 'aaa'
 
        obj.attrs = {'foo': 'bar'}
 
        obj.date = datetime.datetime(2010, 12, 31)
 
        obj.count = 1001
 

	
 
        self.assertEqual(get_dict_for_attrs(obj, ['CONST', 'foo', 'attrs',
 
            'date', 'count']), {
 
            'CONST': True,
 
            'foo': 'aaa',
 
            'attrs': {'foo': 'bar'},
 
            'date': datetime.datetime(2010, 12, 31),
 
            'count': 1001,
 
        })
 

	
 

	
 
class TestGetTotalSeconds(unittest.TestCase):
 

	
 
    def assertTotalSecondsEqual(self, timedelta, expected_seconds):
 
        result = get_total_seconds(timedelta)
 
        self.assertEqual(result, expected_seconds,
 
            "We computed %s seconds for %s but expected %s"
 
            % (result, timedelta, expected_seconds))
 

	
 
    def test_get_total_seconds_returns_proper_value(self):
 
        self.assertTotalSecondsEqual(datetime.timedelta(seconds=1001), 1001)
 

	
 
    def test_get_total_seconds_returns_proper_value_for_partial_seconds(self):
 
        self.assertTotalSecondsEqual(datetime.timedelta(seconds=50.65), 50.65)
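    # get_total_seconds is presumably a compatibility shim for Python 2.6,
    # where timedelta.total_seconds() is unavailable; the documented
    # equivalent is:
    #
    #     (td.microseconds + (td.seconds + td.days * 86400) * 10**6) / 10.0**6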
 

	
 

	
 
class TestGetUserHome(unittest.TestCase):
 

	
 
    @mock.patch.object(os, 'environ', {})
 
    def test_defaults_to_empty_string(self):
 
        self.assertEqual(get_user_home(), '')
 

	
 
    @mock.patch.object(os, 'environ', {'HOME': '/home/foobar'})
 
    def test_unix_like(self):
 
        self.assertEqual(get_user_home(), '/home/foobar')
 

	
 
    @mock.patch.object(os, 'environ', {'USERPROFILE': '/Users/foobar'})
 
    def test_windows_like(self):
 
        self.assertEqual(get_user_home(), '/Users/foobar')
 

	
 
    @mock.patch.object(os, 'environ', {'HOME': '/home/foobar',
 
        'USERPROFILE': '/Users/foobar'})
 
    def test_prefers_home_over_userprofile(self):
 
        self.assertEqual(get_user_home(), '/home/foobar')
 

	
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_utils_filesize.py
Show inline comments
 
from __future__ import with_statement
 

	
 
from kallithea.lib.vcs.utils.filesize import filesizeformat
 
from kallithea.lib.vcs.utils.compat import unittest
 

	
 

	
 
class TestFilesizeformat(unittest.TestCase):
 

	
 
    def test_bytes(self):
 
        self.assertEqual(filesizeformat(10), '10 B')
 

	
 
    def test_kilobytes(self):
 
        self.assertEqual(filesizeformat(1024 * 2), '2 KB')
 

	
 
    def test_megabytes(self):
 
        self.assertEqual(filesizeformat(1024 * 1024 * 2.3), '2.3 MB')
 

	
 
    def test_gigabytes(self):
 
        self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 12.92), '12.92 GB')
 

	
 
    def test_that_function_respects_sep_parameter(self):
 
        self.assertEqual(filesizeformat(1, ''), '1B')
 

	
 

	
 
if __name__ == '__main__':
 
    unittest.main()
kallithea/tests/vcs/test_vcs.py
Show inline comments
 
from __future__ import with_statement
 

	
 
import os
 
import shutil
 

	
 
from kallithea.lib.vcs import VCSError, get_repo, get_backend
 
from kallithea.lib.vcs.backends.hg import MercurialRepository
 
from kallithea.lib.vcs.utils.compat import unittest
 
from kallithea.tests.vcs.conf import TEST_HG_REPO, TEST_GIT_REPO, TEST_TMP_PATH
 

	
 

	
 

	
 
class VCSTest(unittest.TestCase):
 
    """
 
    Tests for main module's methods.
 
    """
 

	
 
    def test_get_backend(self):
 
        hg = get_backend('hg')
 
        self.assertEqual(hg, MercurialRepository)
 

	
 
    def test_alias_detect_hg(self):
 
        alias = 'hg'
 
        path = TEST_HG_REPO
 
        backend = get_backend(alias)
 
        repo = backend(path)
 
        self.assertEqual('hg', repo.alias)
 

	
 
    def test_alias_detect_git(self):
 
        alias = 'git'
 
        path = TEST_GIT_REPO
 
        backend = get_backend(alias)
 
        repo = backend(path)
 
        self.assertEqual('git', repo.alias)
 

	
 
    def test_wrong_alias(self):
 
        alias = 'wrong_alias'
 
        self.assertRaises(VCSError, get_backend, alias)
 

	
 
    def test_get_repo(self):
 
        alias = 'hg'
 
        path = TEST_HG_REPO
 
        backend = get_backend(alias)
 
        repo = backend(path)
 

	
 
        self.assertEqual(repo.__class__, get_repo(path, alias).__class__)
 
        self.assertEqual(repo.path, get_repo(path, alias).path)
 

	
 
    def test_get_repo_autoalias_hg(self):
 
        alias = 'hg'
 
        path = TEST_HG_REPO
 
        backend = get_backend(alias)
 
        repo = backend(path)
 

	
 
        self.assertEqual(repo.__class__, get_repo(path).__class__)
 
        self.assertEqual(repo.path, get_repo(path).path)
 

	
 
    def test_get_repo_autoalias_git(self):
 
        alias = 'git'
 
        path = TEST_GIT_REPO
 
        backend = get_backend(alias)
 
        repo = backend(path)
 

	
 
        self.assertEqual(repo.__class__, get_repo(path).__class__)
 
        self.assertEqual(repo.path, get_repo(path).path)
 

	
 

	
 
    def test_get_repo_err(self):
 
        blank_repo_path = os.path.join(TEST_TMP_PATH, 'blank-error-repo')
 
        if os.path.isdir(blank_repo_path):
 
            shutil.rmtree(blank_repo_path)
 

	
 
        os.mkdir(blank_repo_path)
 
        self.assertRaises(VCSError, get_repo, blank_repo_path)
 
        self.assertRaises(VCSError, get_repo, blank_repo_path + 'non_existing')
 

	
 
    def test_get_repo_multialias(self):
 
        multialias_repo_path = os.path.join(TEST_TMP_PATH, 'hg-git-repo')
 
        if os.path.isdir(multialias_repo_path):
 
            shutil.rmtree(multialias_repo_path)
 

	
 
        os.mkdir(multialias_repo_path)
 

	
 
        os.mkdir(os.path.join(multialias_repo_path, '.git'))
 
        os.mkdir(os.path.join(multialias_repo_path, '.hg'))
 
        self.assertRaises(VCSError, get_repo, multialias_repo_path)
kallithea/tests/vcs/test_workdirs.py
Show inline comments
 
from __future__ import with_statement
 

	
 
import datetime
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.lib.vcs.utils.compat import unittest
 
from kallithea.tests.vcs.base import _BackendTestMixin
 
from kallithea.tests.vcs.conf import SCM_TESTS
 

	
 

	
 
class WorkdirTestCaseMixin(_BackendTestMixin):
 

	
 
    @classmethod
 
    def _get_commits(cls):
 
        commits = [
 
            {
 
                'message': u'Initial commit',
 
                'author': u'Joe Doe <joe.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 20),
 
                'added': [
 
                    FileNode('foobar', content='Foobar'),
 
                    FileNode('foobar2', content='Foobar II'),
 
                    FileNode('foo/bar/baz', content='baz here!'),
 
                ],
 
            },
 
            {
 
                'message': u'Changes...',
 
                'author': u'Jane Doe <jane.doe@example.com>',
 
                'date': datetime.datetime(2010, 1, 1, 21),
 
                'added': [
 
                    FileNode('some/new.txt', content='news...'),
 
                ],
 
                'changed': [
 
                    FileNode('foobar', 'Foobar I'),
 
                ],
 
                'removed': [],
 
            },
 
        ]
 
        return commits
 

	
 
    def test_get_branch_for_default_branch(self):
 
        self.assertEqual(self.repo.workdir.get_branch(),
 
            self.repo.DEFAULT_BRANCH_NAME)
 

	
 
    def test_get_branch_after_adding_one(self):
 
        self.imc.add(FileNode('docs/index.txt',
 
            content='Documentation\n'))
 
        self.imc.commit(
 
            message=u'New branch: foobar',
 
            author=u'joe',
 
            branch='foobar',
 
        )
 
        self.assertEqual(self.repo.workdir.get_branch(), self.default_branch)
 

	
 
    def test_get_changeset(self):
 
        old_head = self.repo.get_changeset()
 
        self.imc.add(FileNode('docs/index.txt',
 
            content='Documentation\n'))
 
        head = self.imc.commit(
 
            message=u'New branch: foobar',
 
            author=u'joe',
 
            branch='foobar',
 
        )
 
        self.assertEqual(self.repo.workdir.get_branch(), self.default_branch)
 
        self.repo.workdir.checkout_branch('foobar')
 
        self.assertEqual(self.repo.workdir.get_changeset(), head)
 

	
 
        # Make sure that the old head is still there after updating to the default branch
 
        self.repo.workdir.checkout_branch(self.default_branch)
 
        self.assertEqual(self.repo.workdir.get_changeset(), old_head)
 

	
 
    def test_checkout_branch(self):
 
        from kallithea.lib.vcs.exceptions import BranchDoesNotExistError
 
        # first, 'foobranch' does not exist.
 
        self.assertRaises(BranchDoesNotExistError, self.repo.workdir.checkout_branch,
 
                          branch='foobranch')
 
        # create new branch 'foobranch'.
 
        self.imc.add(FileNode('file1', content='blah'))
 
        self.imc.commit(message=u'asd', author=u'john', branch='foobranch')
 
        # go back to the default branch
 
        self.repo.workdir.checkout_branch()
 
        self.assertEqual(self.repo.workdir.get_branch(), self.backend_class.DEFAULT_BRANCH_NAME)
 
        # checkout 'foobranch'
 
        self.repo.workdir.checkout_branch('foobranch')
 
        self.assertEqual(self.repo.workdir.get_branch(), 'foobranch')
 

	
 

	
 
# For each backend create test case class
 
for alias in SCM_TESTS:
 
    attrs = {
 
        'backend_alias': alias,
 
    }
 
    cls_name = ''.join(('%s branch test' % alias).title().split())
 
    bases = (WorkdirTestCaseMixin, unittest.TestCase)
 
    globals()[cls_name] = type(cls_name, bases, attrs)
 

	
 

	
 
if __name__ == '__main__':
 
    unittest.main()
0 comments (0 inline, 0 general)