Changeset - a8e6bb9ee9ea
[Not reviewed]
kallithea/bin/kallithea_api.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.bin.kallithea_api
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
API CLI client for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jun 3, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import print_function
 

	
 
import argparse
 
import sys
 

	
 
from kallithea.bin.base import FORMAT_JSON, FORMAT_PRETTY, RcConf, api_call, json
 

	
 

	
 
def argparser(argv):
 
    usage = (
 
      "kallithea-api [-h] [--format=FORMAT] [--apikey=APIKEY] [--apihost=APIHOST] "
 
      "[--config=CONFIG] [--save-config] "
 
      "METHOD <key:val> <key2:val> ...\n"
 
      "Create config file: kallithea-api --apikey=<key> --apihost=http://kallithea.example.com --save-config"
 
    )
 

	
 
    parser = argparse.ArgumentParser(description='Kallithea API cli',
 
                                     usage=usage)
 

	
 
    ## config
 
    group = parser.add_argument_group('config')
 
    group.add_argument('--apikey', help='api access key')
 
    group.add_argument('--apihost', help='api host')
 
    group.add_argument('--config', help='config file')
 
    group.add_argument('--save-config', action='store_true', help='save the given config into a file')
 

	
 
    group = parser.add_argument_group('API')
 
    group.add_argument('method', metavar='METHOD', nargs='?', type=str, default=None,
 
            help='API method name to call followed by key:value attributes',
 
    )
 
    group.add_argument('--format', dest='format', type=str,
 
            help='output format default: `%s` can '
 
                 'be also `%s`' % (FORMAT_PRETTY, FORMAT_JSON),
 
            default=FORMAT_PRETTY
 
    )
 
    args, other = parser.parse_known_args()
 
    return parser, args, other
 

	
 

	
 
def main(argv=None):
 
    """
 
    Main execution function for cli
 

	
 
    :param argv:
 
    """
 
    if argv is None:
 
        argv = sys.argv
 

	
 
    conf = None
 
    parser, args, other = argparser(argv)
 

	
 
    api_credentials_given = (args.apikey and args.apihost)
 
    if args.save_config:
 
        if not api_credentials_given:
 
            raise parser.error('--save-config requires --apikey and --apihost')
 
        conf = RcConf(config_location=args.config,
 
                      autocreate=True, config={'apikey': args.apikey,
 
                                               'apihost': args.apihost})
 
        sys.exit()
 

	
 
    if not conf:
 
        conf = RcConf(config_location=args.config, autoload=True)
 
        if not conf:
 
            if not api_credentials_given:
 
                parser.error('Could not find config file and missing '
 
                             '--apikey or --apihost in params')
 

	
 
    apikey = args.apikey or conf['apikey']
 
    apihost = args.apihost or conf['apihost']
 
    method = args.method
 

	
 
    # if we don't have method here it's an error
 
    if not method:
 
        parser.error('Please specify method name')
 

	
 
    try:
 
        margs = dict(map(lambda s: s.split(':', 1), other))
 
    except ValueError:
 
        sys.stderr.write('Error parsing arguments \n')
 
        sys.exit()
 
    if args.format == FORMAT_PRETTY:
 
 
        print('Calling method %s => %s' % (method, apihost))
 

	
 
    json_resp = api_call(apikey, apihost, method, **margs)
 
    error_prefix = ''
 
    if json_resp['error']:
 
        error_prefix = 'ERROR:'
 
        json_data = json_resp['error']
 
    else:
 
        json_data = json_resp['result']
 
    if args.format == FORMAT_JSON:
 
 
        print(json.dumps(json_data))
 
    elif args.format == FORMAT_PRETTY:
 
        print('Server response \n%s%s' % (
            error_prefix, json.dumps(json_data, indent=4, sort_keys=True)
        ))
 
    return 0
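

# Hedged example: a minimal sketch of calling the Kallithea JSON-RPC API from
# Python, mirroring what main() above does with "METHOD <key:val> ..." arguments.
# The method name 'get_user' and the 'userid' argument are illustrative
# assumptions; api_call() and json come from the imports at the top of this file.
def _example_api_call(apikey, apihost):
    # key:val pairs become keyword arguments, exactly like
    # dict(map(lambda s: s.split(':', 1), other)) in main()
    margs = dict(s.split(':', 1) for s in ['userid:admin'])
    resp = api_call(apikey, apihost, 'get_user', **margs)
    # api_call() returns a dict with 'error' and 'result' keys (see main())
    if resp['error']:
        return 'ERROR:%s' % json.dumps(resp['error'])
    return json.dumps(resp['result'], indent=4, sort_keys=True)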
 

	
 

	
 
if __name__ == '__main__':
 
    sys.exit(main(sys.argv))
kallithea/bin/kallithea_cli_ishell.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
This file was forked by the Kallithea project in July 2014 and later moved.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 4, 2013
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import print_function
 

	
 
import sys
 

	
 
import kallithea.bin.kallithea_cli_base as cli_base
 
from kallithea.model.db import *
 

	
 

	
 
@cli_base.register_command(config_file_initialize_app=True)
 
def ishell():
 
    """Interactive shell for Kallithea."""
 
    try:
 
        from IPython import embed
 
    except ImportError:
 
 
        print('Kallithea ishell requires the Python package IPython 4 or later')
 
        sys.exit(-1)
 
    from traitlets.config.loader import Config
 
    cfg = Config()
 
    cfg.InteractiveShellEmbed.confirm_exit = False
 
    embed(config=cfg, banner1="Kallithea IShell.")
kallithea/bin/kallithea_gist.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.bin.kallithea_gist
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Gist CLI client for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: May 9, 2013
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import print_function
 

	
 
import argparse
 
import fileinput
 
import os
 
import stat
 
import sys
 

	
 
from kallithea.bin.base import FORMAT_JSON, FORMAT_PRETTY, RcConf, api_call, json
 

	
 

	
 
def argparser(argv):
 
    usage = (
 
      "kallithea-gist [-h] [--format=FORMAT] [--apikey=APIKEY] [--apihost=APIHOST] "
 
      "[--config=CONFIG] [--save-config] [GIST OPTIONS] "
 
      "[filename or stdin use - for terminal stdin ]\n"
 
      "Create config file: kallithea-gist --apikey=<key> --apihost=http://kallithea.example.com --save-config"
 
    )
 

	
 
    parser = argparse.ArgumentParser(description='Kallithea Gist cli',
 
                                     usage=usage)
 

	
 
    ## config
 
    group = parser.add_argument_group('config')
 
    group.add_argument('--apikey', help='api access key')
 
    group.add_argument('--apihost', help='api host')
 
    group.add_argument('--config', help='config file path DEFAULT: ~/.config/kallithea')
 
    group.add_argument('--save-config', action='store_true',
 
                       help='save the given config into a file')
 

	
 
    group = parser.add_argument_group('GIST')
 
    group.add_argument('-p', '--private', action='store_true',
 
                       help='create private Gist')
 
    group.add_argument('-f', '--filename',
 
                       help='set uploaded gist filename, '
 
                            'also defines syntax highlighting')
 
    group.add_argument('-d', '--description', help='Gist description')
 
    group.add_argument('-l', '--lifetime', metavar='MINUTES',
 
                       help='gist lifetime in minutes, -1 (DEFAULT) is forever')
 
    group.add_argument('--format', dest='format', type=str,
 
                       help='output format DEFAULT: `%s` can '
 
                       'be also `%s`' % (FORMAT_PRETTY, FORMAT_JSON),
 
            default=FORMAT_PRETTY
 
    )
 
    args, other = parser.parse_known_args()
 
    return parser, args, other
 

	
 

	
 
def _run(argv):
 
    conf = None
 
    parser, args, other = argparser(argv)
 

	
 
    api_credentials_given = (args.apikey and args.apihost)
 
    if args.save_config:
 
        if not api_credentials_given:
 
            raise parser.error('--save-config requires --apikey and --apihost')
 
        conf = RcConf(config_location=args.config,
 
                      autocreate=True, config={'apikey': args.apikey,
 
                                               'apihost': args.apihost})
 
        sys.exit()
 

	
 
    if not conf:
 
        conf = RcConf(config_location=args.config, autoload=True)
 
        if not conf:
 
            if not api_credentials_given:
 
                parser.error('Could not find config file and missing '
 
                             '--apikey or --apihost in params')
 

	
 
    apikey = args.apikey or conf['apikey']
 
    host = args.apihost or conf['apihost']
 
    DEFAULT_FILENAME = 'gistfile1.txt'
 
    if other:
 
        # skip multifiles for now
 
        filename = other[0]
 
        if filename == '-':
 
            filename = DEFAULT_FILENAME
 
            gist_content = ''
 
            for line in fileinput.input('-'):
 
                gist_content += line
 
        else:
 
            with open(filename, 'rb') as f:
 
                gist_content = f.read()
 

	
 
    else:
 
        filename = DEFAULT_FILENAME
 
        gist_content = None
 
        # A little hacky, but a cross-platform check of where stdin comes from.
        # We skip the terminal case; it can be handled by passing '-'.
 
        mode = os.fstat(0).st_mode
 
        if stat.S_ISFIFO(mode):
 
            # "stdin is piped"
 
            gist_content = sys.stdin.read()
 
        elif stat.S_ISREG(mode):
 
            # "stdin is redirected"
 
            gist_content = sys.stdin.read()
 
        else:
 
            # "stdin is terminal"
 
            pass
 

	
 
    # make sure we don't upload binary stuff
 
    if gist_content and '\0' in gist_content:
 
        raise Exception('Error: binary files upload is not possible')
 

	
 
    filename = os.path.basename(args.filename or filename)
 
    if gist_content:
 
        files = {
 
            filename: {
 
                'content': gist_content,
 
                'lexer': None
 
            }
 
        }
 

	
 
        margs = dict(
 
            lifetime=args.lifetime,
 
            description=args.description,
 
            gist_type='private' if args.private else 'public',
 
            files=files
 
        )
 

	
 
        json_data = api_call(apikey, host, 'create_gist', **margs)['result']
 
        if args.format == FORMAT_JSON:
 
 
            print(json.dumps(json_data))
 
        elif args.format == FORMAT_PRETTY:
 
 
            print(json_data)
 
            print('Created %s gist %s' % (json_data['gist']['type'],
 
                                          json_data['gist']['url']))
 
    return 0
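

# Hedged example: a small sketch of the cross-platform stdin check used in
# _run() above. The helper name _stdin_kind is an illustrative assumption;
# it only demonstrates the os.fstat(0) / stat.S_ISFIFO / stat.S_ISREG logic.
def _stdin_kind():
    mode = os.fstat(0).st_mode        # file descriptor 0 is stdin
    if stat.S_ISFIFO(mode):
        return 'piped'                # e.g. `echo hi | kallithea-gist`
    elif stat.S_ISREG(mode):
        return 'redirected'           # e.g. `kallithea-gist < some_file`
    return 'terminal'                 # interactive; handled via '-' instead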
 

	
 

	
 
def main(argv=None):
 
    """
 
    Main execution function for cli
 

	
 
    :param argv:
 
    """
 
    if argv is None:
 
        argv = sys.argv
 

	
 
    try:
 
        return _run(argv)
 
    except Exception as e:
 
 
        print(e)
 
        return 1
 

	
 

	
 
if __name__ == '__main__':
 
    sys.exit(main(sys.argv))
kallithea/bin/ldap_sync.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.bin.ldap_sync
 
~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
LDAP sync script
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Mar 06, 2013
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import print_function
 

	
 
import urllib2
 
import uuid
 
from ConfigParser import ConfigParser
 

	
 
import ldap
 

	
 
from kallithea.lib.compat import json
 

	
 

	
 
config = ConfigParser()
 
config.read('ldap_sync.conf')
 

	
 

	
 
class InvalidResponseIDError(Exception):
 
    """ Request and response don't have the same UUID. """
 

	
 

	
 
class ResponseError(Exception):
 
    """ Response has an error, something went wrong with request execution. """
 

	
 

	
 
class UserAlreadyInGroupError(Exception):
 
    """ User is already a member of the target group. """
 

	
 

	
 
class UserNotInGroupError(Exception):
 
    """ User is not a member of the target group. """
 

	
 

	
 
class API(object):
 

	
 
    def __init__(self, url, key):
 
        self.url = url
 
        self.key = key
 

	
 
    def get_api_data(self, uid, method, args):
 
        """Prepare dict for API post."""
 
        return {
 
            "id": uid,
 
            "api_key": self.key,
 
            "method": method,
 
            "args": args
 
        }
 

	
 
    def post(self, method, args):
 
        """Send a generic API post to Kallithea.
 

	
 
        This generates a UUID used to validate the response after it is
        returned, handles errors, and returns the result.
 
        """
 
        uid = str(uuid.uuid1())
 
        data = self.get_api_data(uid, method, args)
 

	
 
        data = json.dumps(data)
 
        headers = {'content-type': 'text/plain'}
 
        req = urllib2.Request(self.url, data, headers)
 

	
 
        response = urllib2.urlopen(req)
 
        response = json.load(response)
 

	
 
        if uid != response["id"]:
 
            raise InvalidResponseIDError("UUID does not match.")
 

	
 
        if response["error"] is not None:
 
            raise ResponseError(response["error"])
 

	
 
        return response["result"]
 

	
 
    def create_group(self, name, active=True):
 
        """Create the Kallithea user group."""
 
        args = {
 
            "group_name": name,
 
            "active": str(active)
 
        }
 
        self.post("create_user_group", args)
 

	
 
    def add_membership(self, group, username):
 
        """Add specific user to a group."""
 
        args = {
 
            "usersgroupid": group,
 
            "userid": username
 
        }
 
        result = self.post("add_user_to_user_group", args)
 
        if not result["success"]:
 
            raise UserAlreadyInGroupError("User %s already in group %s." %
 
                                          (username, group))
 

	
 
    def remove_membership(self, group, username):
 
        """Remove specific user from a group."""
 
        args = {
 
            "usersgroupid": group,
 
            "userid": username
 
        }
 
        result = self.post("remove_user_from_user_group", args)
 
        if not result["success"]:
 
            raise UserNotInGroupError("User %s not in group %s." %
 
                                      (username, group))
 

	
 
    def get_group_members(self, name):
 
        """Get the list of member usernames from a user group."""
 
        args = {"usersgroupid": name}
 
        members = self.post("get_user_group", args)['members']
 
        member_list = []
 
        for member in members:
 
            member_list.append(member["username"])
 
        return member_list
 

	
 
    def get_group(self, name):
 
        """Return group info."""
 
        args = {"usersgroupid": name}
 
        return self.post("get_user_group", args)
 

	
 
    def get_user(self, username):
 
        """Return user info."""
 
        args = {"userid": username}
 
        return self.post("get_user", args)
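

# Hedged example: the request shape that API.post() above builds via
# get_api_data() and sends to Kallithea. The values '<api key>' and
# 'developers' are made up for illustration; only the field names come
# from the code above.
def _example_payload():
    uid = str(uuid.uuid1())
    request = {
        "id": uid,                      # echoed back; post() rejects a mismatch
        "api_key": "<api key>",
        "method": "get_user_group",
        "args": {"usersgroupid": "developers"},
    }
    # The response uses the same envelope: {"id": ..., "error": ..., "result": ...}.
    # post() raises ResponseError when "error" is not None, else returns "result".
    return json.dumps(request)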
 

	
 

	
 
class LdapClient(object):
 

	
 
    def __init__(self, uri, user, key, base_dn):
 
        self.client = ldap.initialize(uri, trace_level=0)
 
        self.client.set_option(ldap.OPT_REFERRALS, 0)
 
        self.client.simple_bind(user, key)
 
        self.base_dn = base_dn
 

	
 
    def __del__(self):
 
        self.client.unbind()
 

	
 
    def get_groups(self):
        """Get all the groups as a dict {group_name: group_info, ...}."""
 
        searchFilter = "objectClass=groupOfUniqueNames"
 
        result = self.client.search_s(self.base_dn, ldap.SCOPE_SUBTREE,
 
                                      searchFilter)
 

	
 
        groups = {}
 
        for group in result:
 
            groups[group[1]['cn'][0]] = group[1]
 

	
 
        return groups
 

	
 
    def get_group_users(self, groups, group):
 
        """Returns all the users belonging to a single group.
 

	
 
        Based on the list of groups and memberships, returns all the
 
        users belonging to a single group, searching recursively.
 
        """
 
        users = []
 
        for member in groups[group]["uniqueMember"]:
 
            member = self.parse_member_string(member)
 
            if member[0] == "uid":
 
                users.append(member[1])
 
            elif member[0] == "cn":
 
                users += self.get_group_users(groups, member[1])
 

	
 
        return users
 

	
 
    def parse_member_string(self, member):
        """Parses the member string and returns a tuple of type and name.
 

	
 
        Unique member can be either user or group. Users will have 'uid' as
 
        prefix while groups will have 'cn'.
 
        """
 
        member = member.split(",")[0]
 
        return member.split('=')
 

	
 

	
 
class LdapSync(object):
 

	
 
    def __init__(self):
 
        self.ldap_client = LdapClient(config.get("default", "ldap_uri"),
 
                                      config.get("default", "ldap_user"),
 
                                      config.get("default", "ldap_key"),
 
                                      config.get("default", "base_dn"))
 
        self.kallithea_api = API(config.get("default", "api_url"),
 
                                 config.get("default", "api_key"))
 

	
 
    def update_groups_from_ldap(self):
 
        """Add all the groups from LDAP to Kallithea."""
 
        added = existing = 0
 
        groups = self.ldap_client.get_groups()
 
        for group in groups:
 
            try:
 
                self.kallithea_api.create_group(group)
 
                added += 1
 
            except Exception:
 
                existing += 1
 

	
 
        return added, existing
 

	
 
    def update_memberships_from_ldap(self, group):
 
        """Update memberships based on the LDAP groups."""
 
        groups = self.ldap_client.get_groups()
 
        group_users = self.ldap_client.get_group_users(groups, group)
 

	
 
        # Delete memberships first from each group which are not part
 
        # of the group any more.
 
        members = self.kallithea_api.get_group_members(group)
 
        for member in members:
 
            if member not in group_users:
 
                try:
 
                    self.kallithea_api.remove_membership(group,
 
                                                         member)
 
                except UserNotInGroupError:
 
                    pass
 

	
 
        # Add memberships.
 
        for member in group_users:
 
            try:
 
                self.kallithea_api.add_membership(group, member)
 
            except UserAlreadyInGroupError:
 
                # TODO: handle somehow maybe..
 
                pass
 

	
 

	
 
if __name__ == '__main__':
 
    sync = LdapSync()
 
 
    print(sync.update_groups_from_ldap())
 

	
 
    for gr in sync.ldap_client.get_groups():
 
        # TODO: exception when user does not exist during add membership...
 
        # How should we handle this.. Either sync users as well at this step,
 
        # or just ignore those who don't exist. If we want the second case,
 
        # we need to find a way to recognize the right exception (we always get
 
        # ResponseError with no error code so maybe by return msg (?)
 
        sync.update_memberships_from_ldap(gr)
kallithea/lib/db_manage.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.db_manage
 
~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Database creation and setup module for Kallithea. Used for creating the
database as well as for migration operations
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 10, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import print_function
 

	
 
import logging
 
import os
 
import sys
 
import uuid
 

	
 
import alembic.command
 
import alembic.config
 
import sqlalchemy
 
from sqlalchemy.engine import create_engine
 

	
 
from kallithea.model.base import init_model
 
from kallithea.model.db import Permission, RepoGroup, Repository, Setting, Ui, User, UserRepoGroupToPerm, UserToPerm
 
#from kallithea.model import meta
 
from kallithea.model.meta import Base, Session
 
from kallithea.model.permission import PermissionModel
 
from kallithea.model.repo_group import RepoGroupModel
 
from kallithea.model.user import UserModel
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class DbManage(object):
 
    def __init__(self, dbconf, root, tests=False, SESSION=None, cli_args=None):
 
        self.dbname = dbconf.split('/')[-1]
 
        self.tests = tests
 
        self.root = root
 
        self.dburi = dbconf
 
        self.db_exists = False
 
        self.cli_args = cli_args or {}
 
        self.init_db(SESSION=SESSION)
 

	
 
    def _ask_ok(self, msg):
 
        """Invoke ask_ok unless the force_ask option provides the answer"""
 
        force_ask = self.cli_args.get('force_ask')
 
        if force_ask is not None:
 
            return force_ask
 
        from kallithea.lib.utils2 import ask_ok
 
        return ask_ok(msg)
 

	
 
    def init_db(self, SESSION=None):
 
        if SESSION:
 
            self.sa = SESSION
 
        else:
 
            # init new sessions
 
            engine = create_engine(self.dburi)
 
            init_model(engine)
 
            self.sa = Session()
 

	
 
    def create_tables(self, override=False):
 
        """
 
        Create an auth database
 
        """
 

	
 
        log.info("Any existing database is going to be destroyed")
 
        if self.tests:
 
            destroy = True
 
        else:
 
            destroy = self._ask_ok('Are you sure you want to destroy the old database? [y/n]')
 
        if not destroy:
 
 
            print('Nothing done.')
 
            sys.exit(0)
 
        if destroy:
 
            # drop and re-create old schemas
 

	
 
            url = sqlalchemy.engine.url.make_url(self.dburi)
 
            database = url.database
 

	
 
            # Some databases enforce foreign key constraints and Base.metadata.drop_all() doesn't work
 
            if url.drivername == 'mysql':
 
                url.database = None  # don't connect to the database (it might not exist)
 
                engine = sqlalchemy.create_engine(url)
 
                with engine.connect() as conn:
 
                    conn.execute('DROP DATABASE IF EXISTS ' + database)
 
                    conn.execute('CREATE DATABASE ' + database)
 
            elif url.drivername == 'postgresql':
 
                from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
 
                url.database = 'postgres'  # connect to the system database (as the real one might not exist)
 
                engine = sqlalchemy.create_engine(url)
 
                with engine.connect() as conn:
 
                    conn.connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
 
                    conn.execute('DROP DATABASE IF EXISTS ' + database)
 
                    conn.execute('CREATE DATABASE ' + database)
 
            else:
 
                # known to work on SQLite - possibly not on other databases with strong referential integrity
 
                Base.metadata.drop_all()
 

	
 
        checkfirst = not override
 
        Base.metadata.create_all(checkfirst=checkfirst)
 

	
 
        # Create an Alembic configuration and generate the version table,
 
        # "stamping" it with the most recent Alembic migration revision, to
 
        # tell Alembic that all the schema upgrades are already in effect.
 
        alembic_cfg = alembic.config.Config()
 
        alembic_cfg.set_main_option('script_location', 'kallithea:alembic')
 
        alembic_cfg.set_main_option('sqlalchemy.url', self.dburi)
 
        # This command will give an error in an Alembic multi-head scenario,
 
        # but in practice, such a scenario should not come up during database
 
        # creation, even during development.
 
        alembic.command.stamp(alembic_cfg, 'head')
 

	
 
        log.info('Created tables for %s', self.dbname)
 

	
 
    def fix_repo_paths(self):
 
        """
 
        Fixes an old Kallithea version path into a new one without a '*'
 
        """
 

	
 
        paths = Ui.query() \
 
                .filter(Ui.ui_key == '/') \
 
                .scalar()
 

	
 
        paths.ui_value = paths.ui_value.replace('*', '')
 

	
 
        self.sa.commit()
 

	
 
    def fix_default_user(self):
 
        """
 
        Fixes an old default user with some 'nicer' default values,
 
        used mostly for anonymous access
 
        """
 
        def_user = User.query().filter_by(is_default_user=True).one()
 

	
 
        def_user.name = 'Anonymous'
 
        def_user.lastname = 'User'
 
        def_user.email = 'anonymous@kallithea-scm.org'
 

	
 
        self.sa.commit()
 

	
 
    def fix_settings(self):
 
        """
 
        Fixes Kallithea settings by adding the ga_code key for Google Analytics
 
        """
 

	
 
        hgsettings3 = Setting('ga_code', '')
 

	
 
        self.sa.add(hgsettings3)
 
        self.sa.commit()
 

	
 
    def admin_prompt(self, second=False):
 
        if not self.tests:
 
            import getpass
 

	
 
            username = self.cli_args.get('username')
 
            password = self.cli_args.get('password')
 
            email = self.cli_args.get('email')
 

	
 
            def get_password():
 
                password = getpass.getpass('Specify admin password '
 
                                           '(min 6 chars):')
 
                confirm = getpass.getpass('Confirm password:')
 

	
 
                if password != confirm:
 
                    log.error('passwords mismatch')
 
                    return False
 
                if len(password) < 6:
 
                    log.error('password is too short, use at least 6 characters')
 
                    return False
 

	
 
                return password
 
            if username is None:
 
                username = raw_input('Specify admin username:')
 
            if password is None:
 
                password = get_password()
 
                if not password:
 
                    # second try
 
                    password = get_password()
 
                    if not password:
 
                        sys.exit()
 
            if email is None:
 
                email = raw_input('Specify admin email:')
 
            self.create_user(username, password, email, True)
 
        else:
 
            log.info('creating admin and regular test users')
 
            from kallithea.tests.base import TEST_USER_ADMIN_LOGIN, \
 
                TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \
 
                TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \
 
                TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \
 
                TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL
 

	
 
            self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,
 
                             TEST_USER_ADMIN_EMAIL, True)
 

	
 
            self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
 
                             TEST_USER_REGULAR_EMAIL, False)
 

	
 
            self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS,
 
                             TEST_USER_REGULAR2_EMAIL, False)
 

	
 
    def create_auth_plugin_options(self, skip_existing=False):
 
        """
 
        Create default auth plugin settings, and make it active
 

	
 
        :param skip_existing:
 
        """
 

	
 
        for k, v, t in [('auth_plugins', 'kallithea.lib.auth_modules.auth_internal', 'list'),
 
                        ('auth_internal_enabled', 'True', 'bool')]:
 
            if skip_existing and Setting.get_by_name(k) is not None:
 
                log.debug('Skipping option %s', k)
 
                continue
 
            setting = Setting(k, v, t)
 
            self.sa.add(setting)
 

	
 
    def create_default_options(self, skip_existing=False):
 
        """Creates default settings"""
 

	
 
        for k, v, t in [
 
            ('default_repo_enable_downloads', False, 'bool'),
 
            ('default_repo_enable_statistics', False, 'bool'),
 
            ('default_repo_private', False, 'bool'),
 
            ('default_repo_type', 'hg', 'unicode')
 
        ]:
 
            if skip_existing and Setting.get_by_name(k) is not None:
 
                log.debug('Skipping option %s', k)
 
                continue
 
            setting = Setting(k, v, t)
 
            self.sa.add(setting)
 

	
 
    def fixup_groups(self):
 
        def_usr = User.get_default_user()
 
        for g in RepoGroup.query().all():
 
            g.group_name = g.get_new_name(g.name)
 
            # get default perm
 
            default = UserRepoGroupToPerm.query() \
 
                .filter(UserRepoGroupToPerm.group == g) \
 
                .filter(UserRepoGroupToPerm.user == def_usr) \
 
                .scalar()
 

	
 
            if default is None:
 
                log.debug('missing default permission for group %s, adding', g)
 
                RepoGroupModel()._create_default_perms(g)
 

	
 
    def reset_permissions(self, username):
 
        """
 
        Resets permissions to the default state. Useful when old systems had
        bad permissions that we must clean up
 

	
 
        :param username:
 
        """
 
        default_user = User.get_by_username(username)
 
        if not default_user:
 
            return
 

	
 
        u2p = UserToPerm.query() \
 
            .filter(UserToPerm.user == default_user).all()
 
        fixed = False
 
        if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS):
 
            for p in u2p:
 
                Session().delete(p)
 
            fixed = True
 
            self.populate_default_permissions()
 
        return fixed
 

	
 
    def update_repo_info(self):
 
        for repo in Repository.query():
 
            repo.update_changeset_cache()
 

	
 
    def prompt_repo_root_path(self, test_repo_path='', retries=3):
 
        _path = self.cli_args.get('repos_location')
 
        if retries == 3:
 
            log.info('Setting up repositories config')
 

	
 
        if _path is not None:
 
            path = _path
 
        elif not self.tests and not test_repo_path:
 
            path = raw_input(
 
                 'Enter a valid absolute path to store repositories. '
 
                 'All repositories in that path will be added automatically:'
 
            )
 
        else:
 
            path = test_repo_path
 
        path_ok = True
 

	
 
        # check proper dir
 
        if not os.path.isdir(path):
 
            path_ok = False
 
            log.error('Given path %s is not a valid directory', path)
 

	
 
        elif not os.path.isabs(path):
 
            path_ok = False
 
            log.error('Given path %s is not an absolute path', path)
 

	
 
        # check if path is at least readable.
 
        if not os.access(path, os.R_OK):
 
            path_ok = False
 
            log.error('Given path %s is not readable', path)
 

	
 
        # check write access, warn user about non writeable paths
 
        elif not os.access(path, os.W_OK) and path_ok:
 
            log.warning('No write permission to given path %s', path)
 
            if not self._ask_ok('Given path %s is not writeable, do you want to '
 
                          'continue with read only mode ? [y/n]' % (path,)):
 
                log.error('Canceled by user')
 
                sys.exit(-1)
 

	
 
        if retries == 0:
 
            sys.exit('max retries reached')
 
        if not path_ok:
 
            if _path is not None:
 
                sys.exit('Invalid repo path: %s' % _path)
 
            retries -= 1
 
            return self.prompt_repo_root_path(test_repo_path, retries) # recursing!!!
 

	
 
        real_path = os.path.normpath(os.path.realpath(path))
 

	
 
        if real_path != os.path.normpath(path):
 
            log.warning('Using normalized path %s instead of %s', real_path, path)
 

	
 
        return real_path
 

	
 
    def create_settings(self, repo_root_path):
 
        ui_config = [
 
            ('paths', '/', repo_root_path, True),
 
            #('phases', 'publish', 'false', False)
 
            ('hooks', Ui.HOOK_UPDATE, 'hg update >&2', False),
 
            ('hooks', Ui.HOOK_REPO_SIZE, 'python:kallithea.lib.hooks.repo_size', True),
 
            ('extensions', 'largefiles', '', True),
 
            ('largefiles', 'usercache', os.path.join(repo_root_path, '.cache', 'largefiles'), True),
 
            ('extensions', 'hgsubversion', '', False),
 
            ('extensions', 'hggit', '', False),
 
        ]
 
        for ui_section, ui_key, ui_value, ui_active in ui_config:
 
            ui_conf = Ui(
 
                ui_section=ui_section,
 
                ui_key=ui_key,
 
                ui_value=ui_value,
 
                ui_active=ui_active)
 
            self.sa.add(ui_conf)
 

	
 
        settings = [
 
            ('realm', 'Kallithea', 'unicode'),
 
            ('title', '', 'unicode'),
 
            ('ga_code', '', 'unicode'),
 
            ('show_public_icon', True, 'bool'),
 
            ('show_private_icon', True, 'bool'),
 
            ('stylify_metalabels', False, 'bool'),
 
            ('dashboard_items', 100, 'int'), # TODO: call it page_size
 
            ('admin_grid_items', 25, 'int'),
 
            ('show_version', True, 'bool'),
 
            ('use_gravatar', True, 'bool'),
 
            ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'),
 
            ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'),
 
            ('clone_ssh_tmpl', Repository.DEFAULT_CLONE_SSH, 'unicode'),
 
        ]
 
        for key, val, type_ in settings:
 
            sett = Setting(key, val, type_)
 
            self.sa.add(sett)
 

	
 
        self.create_auth_plugin_options()
 
        self.create_default_options()
 

	
 
        log.info('Populated Ui and Settings defaults')
 

	
 
    def create_user(self, username, password, email='', admin=False):
 
        log.info('creating user %s', username)
 
        UserModel().create_or_update(username, password, email,
 
                                     firstname=u'Kallithea', lastname=u'Admin',
 
                                     active=True, admin=admin,
 
                                     extern_type=User.DEFAULT_AUTH_TYPE)
 

	
 
    def create_default_user(self):
 
        log.info('creating default user')
 
        # create default user for handling default permissions.
 
        user = UserModel().create_or_update(username=User.DEFAULT_USER,
 
                                            password=str(uuid.uuid1())[:20],
 
                                            email='anonymous@kallithea-scm.org',
 
                                            firstname=u'Anonymous',
 
                                            lastname=u'User')
 
        # based on configuration options activate/deactivate this user which
 
        # controls anonymous access
 
        if self.cli_args.get('public_access') is False:
 
            log.info('Public access disabled')
 
            user.active = False
 
            Session().commit()
 

	
 
    def create_permissions(self):
 
        """
 
        Creates all permissions defined in the system
 
        """
 
        # module.(access|create|change|delete)_[name]
 
        # module.(none|read|write|admin)
 
        log.info('creating permissions')
 
        PermissionModel().create_permissions()
 

	
 
    def populate_default_permissions(self):
 
        """
 
        Populate default permissions. It will create only the default
 
        permissions that are missing, and not alter already defined ones
 
        """
 
        log.info('creating default user permissions')
 
        PermissionModel().create_default_permissions(user=User.DEFAULT_USER)
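

# Hedged example: a rough sketch of how the DbManage methods above could fit
# together when initializing a fresh database. The dburi, root and repository
# path values are illustrative assumptions, and the exact call order used by
# the real setup command is not shown in this changeset.
def _example_setup():
    dbmanage = DbManage(dbconf='sqlite:///kallithea.db', root='.', tests=True,
                        cli_args={'force_ask': True})
    dbmanage.create_tables(override=True)    # drop/recreate schema, stamp Alembic head
    dbmanage.create_settings('/srv/repos')   # populate Ui and Setting defaults
    dbmanage.create_default_user()
    dbmanage.admin_prompt()                  # tests=True creates the test users
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()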
kallithea/lib/pidlock.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
from __future__ import print_function
 

	
 
import errno
 
import os
 
from multiprocessing.util import Finalize
 

	
 
from kallithea.lib.compat import kill
 

	
 

	
 
class LockHeld(Exception):
 
    pass
 

	
 

	
 
class DaemonLock(object):
 
    """daemon locking
 
    USAGE:
 
    try:
 
        l = DaemonLock('/path/tolockfile',desc='test lock')
 
        main()
 
        l.release()
 
    except LockHeld:
 
        sys.exit(1)
 
    """
 

	
 
    def __init__(self, file_, callbackfn=None,
 
                 desc='daemon lock', debug=False):
 
        self.pidfile = file_
 
        self.callbackfn = callbackfn
 
        self.desc = desc
 
        self.debug = debug
 
        self.held = False
 
        # run the lock automatically!
 
        self.lock()
 
        self._finalize = Finalize(self, DaemonLock._on_finalize,
 
                                  args=(self, debug), exitpriority=10)
 

	
 
    @staticmethod
 
    def _on_finalize(lock, debug):
 
        if lock.held:
 
            if debug:
 
 
                print('lock held finalizing and running lock.release()')
 
            lock.release()
 

	
 
    def lock(self):
 
        """
 
        locking function, if lock is present it
 
        will raise LockHeld exception
 
        """
 
        lockname = str(os.getpid())
 
        if self.debug:
 
 
            print('running lock')
 
        self.trylock()
 
        self.makelock(lockname, self.pidfile)
 
        return True
 

	
 
    def trylock(self):
 
        running_pid = False
 
        if self.debug:
 
 
            print('checking for already running process')
 
        try:
 
            with open(self.pidfile, 'r') as f:
 
                try:
 
                    running_pid = int(f.readline())
 
                except ValueError:
 
                    running_pid = -1
 

	
 
            if self.debug:
 
 
                print('lock file present running_pid: %s, '
 
                      'checking for execution' % (running_pid,))
 
            # Now we check the PID from lock file matches to the current
 
            # process PID
 
            if running_pid:
 
                try:
 
                    kill(running_pid, 0)
 
                except OSError as exc:
 
                    if exc.errno in (errno.ESRCH, errno.EPERM):
 
                        print ("Lock File is there but"
 
                               " the program is not running")
 
 
                        print("Removing lock file for the: %s" % running_pid)
 
                        self.release()
 
                    else:
 
                        raise
 
                else:
 
 
                    print("You already have an instance of the program running")
 
                    print("It is running as process %s" % running_pid)
 
                    raise LockHeld()
 

	
 
        except IOError as e:
 
            if e.errno != 2:
 
                raise
 

	
 
    def release(self):
        """releases the lock by removing the pidfile
 
        """
 
        if self.debug:
 
 
            print('trying to release the pidlock')
 

	
 
        if self.callbackfn:
 
            #execute callback function on release
 
            if self.debug:
 
 
                print('executing callback function %s' % self.callbackfn)
 
            self.callbackfn()
 
        try:
 
            if self.debug:
 
 
                print('removing pidfile %s' % self.pidfile)
 
            os.remove(self.pidfile)
 
            self.held = False
 
        except OSError as e:
 
            if self.debug:
 
 
                print('removing pidfile failed %s' % e)
 
            pass
 

	
 
    def makelock(self, lockname, pidfile):
 
        """
 
        this function will make an actual lock
 

	
 
        :param lockname: the pid (as a string) to write into the file
 
        :param pidfile: the file to write the pid in
 
        """
 
        if self.debug:
 
 
            print('creating a file %s and pid: %s' % (pidfile, lockname))
 

	
 
        dir_, file_ = os.path.split(pidfile)
 
        if not os.path.isdir(dir_):
 
            os.makedirs(dir_)
 
        with open(self.pidfile, 'wb') as f:
 
            f.write(lockname)
 
        self.held = True
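

# Hedged example: a slightly fuller version of the usage shown in the
# DaemonLock docstring above. The lock file path and the "work" placeholder
# are illustrative assumptions.
def _example_usage():
    try:
        lock = DaemonLock('/tmp/example-daemon.lock', desc='example lock')
    except LockHeld:
        return False            # another instance already holds the pidfile
    try:
        pass                    # ... do the actual work while the lock is held ...
    finally:
        lock.release()          # remove the pidfile (also done by the finalizer)
    return True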
kallithea/lib/utils2.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.utils2
 
~~~~~~~~~~~~~~~~~~~~
 

	
 
Some simple helper functions.
 
Note: all these functions should be independent of Kallithea classes, i.e.
 
models, controllers, etc., to prevent import cycles.
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jan 5, 2011
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import print_function
 

	
 
import binascii
 
import datetime
 
import os
 
import pwd
 
import re
 
import time
 
import urllib
 

	
 
import urlobject
 
from tg.i18n import ugettext as _
 
from tg.i18n import ungettext
 
from webhelpers2.text import collapse, remove_formatting, strip_tags
 

	
 
from kallithea.lib.compat import json
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 

	
 

	
 
def str2bool(_str):
 
    """
 
    returns True/False value from given string, it tries to translate the
 
    string into boolean
 

	
 
    :param _str: string value to translate into boolean
 
    :rtype: boolean
 
    :returns: boolean from given string
 
    """
 
    if _str is None:
 
        return False
 
    if _str in (True, False):
 
        return _str
 
    _str = str(_str).strip().lower()
 
    return _str in ('t', 'true', 'y', 'yes', 'on', '1')
 

	
 

	
 
def aslist(obj, sep=None, strip=True):
 
    """
 
    Returns given string separated by sep as list
 

	
 
    :param obj:
 
    :param sep:
 
    :param strip:
 
    """
 
    if isinstance(obj, (basestring)):
 
        lst = obj.split(sep)
 
        if strip:
 
            lst = [v.strip() for v in lst]
 
        return lst
 
    elif isinstance(obj, (list, tuple)):
 
        return obj
 
    elif obj is None:
 
        return []
 
    else:
 
        return [obj]
 

	
 

	
 
def convert_line_endings(line, mode):
 
    """
 
    Converts the line endings of a given line according to the given mode
 

	
 
    Available modes are::
 
        0 - Unix
 
        1 - Mac
 
        2 - DOS
 

	
 
    :param line: given line to convert
 
    :param mode: mode to convert to
 
    :rtype: str
 
    :return: converted line according to mode
 
    """
 
    from string import replace
 

	
 
    if mode == 0:
 
        line = replace(line, '\r\n', '\n')
 
        line = replace(line, '\r', '\n')
 
    elif mode == 1:
 
        line = replace(line, '\r\n', '\r')
 
        line = replace(line, '\n', '\r')
 
    elif mode == 2:
 
        line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line)
 
    return line
 

	
 

	
 
def detect_mode(line, default):
 
    """
 
    Detects the line break style for a given line; if it cannot be determined,
    the given default value is returned
 

	
 
    :param line: str line
 
    :param default: default
 
    :rtype: int
 
    :return: line ending value, one of 0 - Unix, 1 - Mac, 2 - DOS
 
    """
 
    if line.endswith('\r\n'):
 
        return 2
 
    elif line.endswith('\n'):
 
        return 0
 
    elif line.endswith('\r'):
 
        return 1
 
    else:
 
        return default
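

# Hedged example: detect_mode() and convert_line_endings() above share the
# same 0/1/2 encoding (Unix/Mac/DOS), so they can be chained to normalize a
# line; the sample string is an illustrative assumption.
def _example_line_endings():
    line = 'print("hi")\r\n'
    mode = detect_mode(line, default=0)     # '\r\n' -> 2 (DOS)
    assert mode == 2
    unix = convert_line_endings(line, 0)    # force Unix line endings
    assert unix == 'print("hi")\n'
    return unix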
 

	
 

	
 
def generate_api_key():
 
    """
 
    Generates a random (presumably unique) API key.
 

	
 
    This value is used in URLs and "Bearer" HTTP Authorization headers,
 
    which in practice means it should only contain URL-safe characters
 
    (RFC 3986):
 

	
 
        unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
 
    """
 
    # Hexadecimal certainly qualifies as URL-safe.
 
    return binascii.hexlify(os.urandom(20))
 

	
 

	
 
def safe_int(val, default=None):
 
    """
 
    Returns int() of val; if val is not convertible to int, the given default
    is used instead
 

	
 
    :param val:
 
    :param default:
 
    """
 

	
 
    try:
 
        val = int(val)
 
    except (ValueError, TypeError):
 
        val = default
 

	
 
    return val
 

	
 

	
 
def safe_unicode(str_, from_encoding=None):
 
    """
 
    safe unicode function. Does a few tricks to turn str_ into unicode
 

	
 
    In case of a UnicodeDecodeError we try to decode it with the encoding detected
    by the chardet library; if that fails, fall back to unicode with errors replaced
 

	
 
    :param str_: string to decode
 
    :rtype: unicode
 
    :returns: unicode object
 
    """
 
    if isinstance(str_, unicode):
 
        return str_
 

	
 
    if not from_encoding:
 
        import kallithea
 
        DEFAULT_ENCODINGS = aslist(kallithea.CONFIG.get('default_encoding',
 
                                                        'utf-8'), sep=',')
 
        from_encoding = DEFAULT_ENCODINGS
 

	
 
    if not isinstance(from_encoding, (list, tuple)):
 
        from_encoding = [from_encoding]
 

	
 
    try:
 
        return unicode(str_)
 
    except UnicodeDecodeError:
 
        pass
 

	
 
    for enc in from_encoding:
 
        try:
 
            return unicode(str_, enc)
 
        except UnicodeDecodeError:
 
            pass
 

	
 
    try:
 
        import chardet
 
        encoding = chardet.detect(str_)['encoding']
 
        if encoding is None:
 
            raise Exception()
 
        return str_.decode(encoding)
 
    except (ImportError, UnicodeDecodeError, Exception):
 
        return unicode(str_, from_encoding[0], 'replace')
 

	
 

	
 
def safe_str(unicode_, to_encoding=None):
 
    """
 
    safe str function. Does a few tricks to turn unicode_ into a string
 

	
 
    In case of a UnicodeEncodeError we try to encode it with the encoding detected
    by the chardet library; if that fails, fall back to a string with errors replaced
 

	
 
    :param unicode_: unicode to encode
 
    :rtype: str
 
    :returns: str object
 
    """
 

	
 
    # if it's not basestr cast to str
 
    if not isinstance(unicode_, basestring):
 
        return str(unicode_)
 

	
 
    if isinstance(unicode_, str):
 
        return unicode_
 

	
 
    if not to_encoding:
 
        import kallithea
 
        DEFAULT_ENCODINGS = aslist(kallithea.CONFIG.get('default_encoding',
 
                                                        'utf-8'), sep=',')
 
        to_encoding = DEFAULT_ENCODINGS
 

	
 
    if not isinstance(to_encoding, (list, tuple)):
 
        to_encoding = [to_encoding]
 

	
 
    for enc in to_encoding:
 
        try:
 
            return unicode_.encode(enc)
 
        except UnicodeEncodeError:
 
            pass
 

	
 
    try:
 
        import chardet
 
        encoding = chardet.detect(unicode_)['encoding']
 
        if encoding is None:
 
            # UnicodeEncodeError needs 5 arguments; construct it so the except below catches it
            raise UnicodeEncodeError('chardet', unicode_, 0, 1, 'encoding detection failed')
 

	
 
        return unicode_.encode(encoding)
 
    except (ImportError, UnicodeEncodeError):
 
        return unicode_.encode(to_encoding[0], 'replace')
 

	
 

	
 
def remove_suffix(s, suffix):
 
    if s.endswith(suffix):
 
        s = s[:-1 * len(suffix)]
 
    return s
 

	
 

	
 
def remove_prefix(s, prefix):
 
    if s.startswith(prefix):
 
        s = s[len(prefix):]
 
    return s
 

	
 

	
 
def age(prevdate, show_short_version=False, now=None):
 
    """
 
    turns a datetime into an age string.
 
    If show_short_version is True, it will generate a shorter but less accurate string,
    for example: 2 days ago, instead of 2 days and 23 hours ago.
 

	
 
    :param prevdate: datetime object
 
    :param show_short_version: if it should approximate the date and return a shorter string
 
    :rtype: unicode
 
    :returns: unicode words describing age
 
    """
 
    now = now or datetime.datetime.now()
 
    order = ['year', 'month', 'day', 'hour', 'minute', 'second']
 
    deltas = {}
 
    future = False
 

	
 
    if prevdate > now:
 
        now, prevdate = prevdate, now
 
        future = True
 
    if future:
 
        prevdate = prevdate.replace(microsecond=0)
 
    # Get date parts deltas
 
    from dateutil import relativedelta
 
    for part in order:
 
        d = relativedelta.relativedelta(now, prevdate)
 
        deltas[part] = getattr(d, part + 's')
 

	
 
    # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
 
    # not 1 hour, -59 minutes and -59 seconds)
 
    for num, length in [(5, 60), (4, 60), (3, 24)]:  # seconds, minutes, hours
 
        part = order[num]
 
        carry_part = order[num - 1]
 

	
 
        if deltas[part] < 0:
 
            deltas[part] += length
 
            deltas[carry_part] -= 1
 

	
 
    # Same thing for days except that the increment depends on the (variable)
 
    # number of days in the month
 
    month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
 
    if deltas['day'] < 0:
 
        if prevdate.month == 2 and (prevdate.year % 4 == 0 and
 
            (prevdate.year % 100 != 0 or prevdate.year % 400 == 0)
 
        ):
 
            deltas['day'] += 29
 
        else:
 
            deltas['day'] += month_lengths[prevdate.month - 1]
 

	
 
        deltas['month'] -= 1
 

	
 
    if deltas['month'] < 0:
 
        deltas['month'] += 12
 
        deltas['year'] -= 1
 

	
 
    # In short version, we want nicer handling of ages of more than a year
 
    if show_short_version:
 
        if deltas['year'] == 1:
 
            # ages between 1 and 2 years: show as months
 
            deltas['month'] += 12
 
            deltas['year'] = 0
 
        if deltas['year'] >= 2:
 
            # ages 2+ years: round
 
            if deltas['month'] > 6:
 
                deltas['year'] += 1
 
                deltas['month'] = 0
 

	
 
    # Format the result
 
    fmt_funcs = {
 
        'year': lambda d: ungettext(u'%d year', '%d years', d) % d,
 
        'month': lambda d: ungettext(u'%d month', '%d months', d) % d,
 
        'day': lambda d: ungettext(u'%d day', '%d days', d) % d,
 
        'hour': lambda d: ungettext(u'%d hour', '%d hours', d) % d,
 
        'minute': lambda d: ungettext(u'%d minute', '%d minutes', d) % d,
 
        'second': lambda d: ungettext(u'%d second', '%d seconds', d) % d,
 
    }
 

	
 
    for i, part in enumerate(order):
 
        value = deltas[part]
 
        if value == 0:
 
            continue
 

	
 
        if i < 5:
 
            sub_part = order[i + 1]
 
            sub_value = deltas[sub_part]
 
        else:
 
            sub_value = 0
 

	
 
        if sub_value == 0 or show_short_version:
 
            if future:
 
                return _('in %s') % fmt_funcs[part](value)
 
            else:
 
                return _('%s ago') % fmt_funcs[part](value)
 
        if future:
 
            return _('in %s and %s') % (fmt_funcs[part](value),
 
                fmt_funcs[sub_part](sub_value))
 
        else:
 
            return _('%s and %s ago') % (fmt_funcs[part](value),
 
                fmt_funcs[sub_part](sub_value))
 

	
 
    return _('just now')
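For illustration, a minimal sketch of how age() renders with the default English translation; the kallithea.lib.utils2 import path and the exact output strings are assumptions:

    import datetime

    from kallithea.lib.utils2 import age  # assumed module path

    now = datetime.datetime(2020, 1, 10, 12, 0, 0)
    prevdate = now - datetime.timedelta(days=2, hours=23)
    print(age(prevdate, now=now))                           # e.g. u'2 days and 23 hours ago'
    print(age(prevdate, show_short_version=True, now=now))  # e.g. u'2 days ago'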
 

	
 

	
 
def uri_filter(uri):
 
    """
 
    Removes user:password from given url string
 

	
 
    :param uri:
 
    :rtype: list
 
    :returns: filtered list of strings
 
    """
 
    if not uri:
 
        return ''
 

	
 
    proto = ''
 

	
 
    for pat in ('https://', 'http://', 'git://'):
 
        if uri.startswith(pat):
 
            uri = uri[len(pat):]
 
            proto = pat
 
            break
 

	
 
    # remove passwords and username
 
    uri = uri[uri.find('@') + 1:]
 

	
 
    # get the port
 
    cred_pos = uri.find(':')
 
    if cred_pos == -1:
 
        host, port = uri, None
 
    else:
 
        host, port = uri[:cred_pos], uri[cred_pos + 1:]
 

	
 
    return filter(None, [proto, host, port])
 

	
 

	
 
def credentials_filter(uri):
 
    """
 
    Returns a url with removed credentials
 

	
 
    :param uri:
 
    """
 

	
 
    uri = uri_filter(uri)
 
    # check if we have port
 
    if len(uri) > 2 and uri[2]:
 
        uri[2] = ':' + uri[2]
 

	
 
    return ''.join(uri)
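A small sketch of the credential stripping above, with purely hypothetical URLs (assuming the function is importable as shown):

    from kallithea.lib.utils2 import credentials_filter  # assumed module path

    print(credentials_filter('https://user:secret@example.com:8080/repo'))
    # -> 'https://example.com:8080/repo'
    print(credentials_filter('git://example.com/repo'))
    # -> 'git://example.com/repo' (nothing to strip)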
 

	
 

	
 
def get_clone_url(clone_uri_tmpl, prefix_url, repo_name, repo_id, username=None):
 
    parsed_url = urlobject.URLObject(prefix_url)
 
    prefix = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/')))
 
    try:
 
        system_user = pwd.getpwuid(os.getuid()).pw_name
 
    except Exception: # TODO: support all systems - especially Windows
 
        system_user = 'kallithea' # hardcoded default value ...
 
    args = {
 
        'scheme': parsed_url.scheme,
 
        'user': safe_unicode(urllib.quote(safe_str(username or ''))),
 
        'netloc': parsed_url.netloc + prefix,  # like "hostname:port/prefix" (with optional ":port" and "/prefix")
 
        'prefix': prefix, # undocumented, empty or starting with /
 
        'repo': repo_name,
 
        'repoid': str(repo_id),
 
        'system_user': safe_unicode(system_user),
 
        'hostname': parsed_url.hostname,
 
    }
 
    url = re.sub('{([^{}]+)}', lambda m: args.get(m.group(1), m.group(0)), clone_uri_tmpl)
 

	
 
    # remove the leading @ sign if present (case of an empty user)
 
    url_obj = urlobject.URLObject(url)
 
    if not url_obj.username:
 
        url_obj = url_obj.with_username(None)
 

	
 
    return safe_unicode(url_obj)
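A sketch of the template expansion, using a hypothetical clone URI template; the available substitution keys are exactly those in the `args` dict above:

    from kallithea.lib.utils2 import get_clone_url  # assumed module path

    url = get_clone_url(clone_uri_tmpl='{scheme}://{user}@{netloc}/{repo}',
                        prefix_url='https://kallithea.example.com/prefix',
                        repo_name='group/myrepo', repo_id=42, username='jane')
    # -> u'https://jane@kallithea.example.com/prefix/group/myrepo'
    # With username=None the '{user}' part is empty and the leading '@' is dropped.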
 

	
 

	
 
def get_changeset_safe(repo, rev):
 
    """
 
    Safe version of get_changeset: if the changeset doesn't exist for the
 
    given repo, a dummy EmptyChangeset is returned instead.
 

	
 
    :param repo:
 
    :param rev:
 
    """
 
    from kallithea.lib.vcs.backends.base import BaseRepository
 
    from kallithea.lib.vcs.exceptions import RepositoryError
 
    from kallithea.lib.vcs.backends.base import EmptyChangeset
 
    if not isinstance(repo, BaseRepository):
 
        raise Exception('You must pass a Repository '
 
                        'object as first argument, got %s' % type(repo))
 

	
 
    try:
 
        cs = repo.get_changeset(rev)
 
    except (RepositoryError, LookupError):
 
        cs = EmptyChangeset(requested_revision=rev)
 
    return cs
 

	
 

	
 
def datetime_to_time(dt):
 
    if dt:
 
        return time.mktime(dt.timetuple())
 

	
 

	
 
def time_to_datetime(tm):
 
    if tm:
 
        if isinstance(tm, basestring):
 
            try:
 
                tm = float(tm)
 
            except ValueError:
 
                return
 
        return datetime.datetime.fromtimestamp(tm)
 

	
 

	
 
# Must match regexp in kallithea/public/js/base.js MentionsAutoComplete()
 
# Check char before @ - it must not look like we are in an email address.
 
# Matching is greedy so we don't have to look beyond the end.
 
MENTIONS_REGEX = re.compile(r'(?:^|(?<=[^a-zA-Z0-9]))@([a-zA-Z0-9][-_.a-zA-Z0-9]*[a-zA-Z0-9])')
 

	
 

	
 
def extract_mentioned_usernames(text):
 
    r"""
 
    Returns list of (possible) usernames @mentioned in given text.
 

	
 
    >>> extract_mentioned_usernames('@1-2.a_X,@1234 not@not @ddd@not @n @ee @ff @gg, @gg;@hh @n\n@zz,')
 
    ['1-2.a_X', '1234', 'ddd', 'ee', 'ff', 'gg', 'gg', 'hh', 'zz']
 
    """
 
    return MENTIONS_REGEX.findall(text)
 

	
 

	
 
def extract_mentioned_users(text):
 
    """ Returns set of actual database Users @mentioned in given text. """
 
    from kallithea.model.db import User
 
    result = set()
 
    for name in extract_mentioned_usernames(text):
 
        user = User.get_by_username(name, case_insensitive=True)
 
        if user is not None and not user.is_default_user:
 
            result.add(user)
 
    return result
 

	
 

	
 
class AttributeDict(dict):
 
    def __getattr__(self, attr):
 
        return self.get(attr, None)
 
    __setattr__ = dict.__setitem__
 
    __delattr__ = dict.__delitem__
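A short sketch of the AttributeDict semantics defined above (values are illustrative):

    d = AttributeDict(name='example')
    print(d.name)       # 'example'
    print(d.missing)    # None - missing keys do not raise AttributeError
    d.other = 123       # same as d['other'] = 123
    del d.other         # same as del d['other']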
 

	
 

	
 
def obfuscate_url_pw(engine):
 
    from sqlalchemy.engine import url as sa_url
 
    from sqlalchemy.exc import ArgumentError
 
    try:
 
        _url = sa_url.make_url(engine or '')
 
    except ArgumentError:
 
        return engine
 
    if _url.password:
 
        _url.password = 'XXXXX'
 
    return str(_url)
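A sketch of the password obfuscation, using a hypothetical SQLAlchemy URL (behaviour as observed with the SQLAlchemy versions this Python 2 code targets):

    print(obfuscate_url_pw('postgresql://kallithea:secret@localhost/kallithea'))
    # -> 'postgresql://kallithea:XXXXX@localhost/kallithea'
    print(obfuscate_url_pw('sqlite:///kallithea.db'))
    # -> 'sqlite:///kallithea.db' (no password, returned unchanged)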
 

	
 

	
 
def get_hook_environment():
 
    """
 
    Get hook context by deserializing the global KALLITHEA_EXTRAS environment
 
    variable.
 

	
 
    Called early in Git out-of-process hooks to get .ini config path so the
 
    basic environment can be configured properly. Also used in all hooks to get
 
    information about the action that triggered it.
 
    """
 

	
 
    try:
 
        extras = json.loads(os.environ['KALLITHEA_EXTRAS'])
 
    except KeyError:
 
        raise Exception("Environment variable KALLITHEA_EXTRAS not found")
 

	
 
    try:
 
        for k in ['username', 'repository', 'scm', 'action', 'ip']:
 
            extras[k]
 
    except KeyError:
 
        raise Exception('Missing key %s in KALLITHEA_EXTRAS %s' % (k, extras))
 

	
 
    return AttributeDict(extras)
 

	
 

	
 
def set_hook_environment(username, ip_addr, repo_name, repo_alias, action=None):
 
    """Prepare global context for running hooks by serializing data in the
 
    global KALLITHEA_EXTRAS environment variable.
 

	
 
    Most importantly, this allows Git hooks to do proper logging and updating of
 
    caches after pushes.
 

	
 
    Must always be called before anything involving hooks is invoked.
 
    """
 
    from kallithea import CONFIG
 
    extras = {
 
        'ip': ip_addr, # used in log_push/pull_action action_logger
 
        'username': username,
 
        'action': action or 'push_local', # used in log_push_action_raw_ids action_logger
 
        'repository': repo_name,
 
        'scm': repo_alias, # used to pick hack in log_push_action_raw_ids
 
        'config': CONFIG['__file__'], # used by git hook to read config
 
    }
 
    os.environ['KALLITHEA_EXTRAS'] = json.dumps(extras)
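A sketch of the round trip between the two hook helpers, assuming a configured Kallithea process (CONFIG['__file__'] must be set) and purely illustrative values:

    # The serving process prepares the context before invoking anything with hooks:
    set_hook_environment(username='jane', ip_addr='127.0.0.1',
                         repo_name='group/myrepo', repo_alias='git', action='push')

    # An out-of-process hook (e.g. a Git hook) later recovers the same context:
    extras = get_hook_environment()
    print(extras.username, extras.repository, extras.scm, extras.action, extras.ip)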
 

	
 

	
 
def get_current_authuser():
 
    """
 
    Gets kallithea user from threadlocal tmpl_context variable if it's
 
    defined, else returns None.
 
    """
 
    from tg import tmpl_context
 
    if hasattr(tmpl_context, 'authuser'):
 
        return tmpl_context.authuser
 

	
 
    return None
 

	
 

	
 
class OptionalAttr(object):
 
    """
 
    Special Optional variant that refers to another attribute by name. Example::
 

	
 
        def test(apiuser, userid=Optional(OAttr('apiuser'))):
 
            user = Optional.extract(userid)
 
            # calls
 

	
 
    """
 

	
 
    def __init__(self, attr_name):
 
        self.attr_name = attr_name
 

	
 
    def __repr__(self):
 
        return '<OptionalAttr:%s>' % self.attr_name
 

	
 
    def __call__(self):
 
        return self
 

	
 

	
 
# alias
 
OAttr = OptionalAttr
 

	
 

	
 
class Optional(object):
 
    """
 
    Defines an optional parameter::
 

	
 
        param = param.getval() if isinstance(param, Optional) else param
 
        param = param() if isinstance(param, Optional) else param
 

	
 
    is equivalent to::
 

	
 
        param = Optional.extract(param)
 

	
 
    """
 

	
 
    def __init__(self, type_):
 
        self.type_ = type_
 

	
 
    def __repr__(self):
 
        return '<Optional:%s>' % self.type_.__repr__()
 

	
 
    def __call__(self):
 
        return self.getval()
 

	
 
    def getval(self):
 
        """
 
        returns value from this Optional instance
 
        """
 
        if isinstance(self.type_, OAttr):
 
            # use params name
 
            return self.type_.attr_name
 
        return self.type_
 

	
 
    @classmethod
 
    def extract(cls, val):
 
        """
 
        Extracts value from Optional() instance
 

	
 
        :param val:
 
        :return: original value if it's not an Optional instance, else
 
            the value of the instance
 
        """
 
        if isinstance(val, cls):
 
            return val.getval()
 
        return val
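A sketch of how Optional and OAttr are typically unwrapped (values are illustrative):

    default_page = Optional(1)
    print(Optional.extract(default_page))                 # -> 1 (unwraps the Optional)
    print(Optional.extract(5))                            # -> 5 (plain values pass through)
    print(Optional.extract(Optional(OAttr('apiuser'))))   # -> 'apiuser' (the referenced attribute name)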
 

	
 

	
 
def urlreadable(s, _cleanstringsub=re.compile('[^-a-zA-Z0-9./]+').sub):
 
    return _cleanstringsub('_', safe_str(s)).rstrip('_')
 

	
 

	
 
def recursive_replace(str_, replace=' '):
 
    """
 
    Recursively replace repeated occurrences of the given character with a single one
 

	
 
    :param str_: given string
 
    :param replace: character whose repeated occurrences should be collapsed
 

	
 
    Examples::
 
    >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
 
    'Mighty-Mighty-Bo-sstones'
 
    """
 

	
 
    if str_.find(replace * 2) == -1:
 
        return str_
 
    else:
 
        str_ = str_.replace(replace * 2, replace)
 
        return recursive_replace(str_, replace)
 

	
 

	
 
def repo_name_slug(value):
 
    """
 
    Return a slug of the repository name.
 
    This function is called on each creation/modification
 
    of a repository to prevent bad names.
 
    """
 

	
 
    slug = remove_formatting(value)
 
    slug = strip_tags(slug)
 

	
 
    for c in r"""`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
 
        slug = slug.replace(c, '-')
 
    slug = recursive_replace(slug, '-')
 
    slug = collapse(slug, '-')
 
    return slug
 

	
 

	
 
def ask_ok(prompt, retries=4, complaint='Yes or no please!'):
 
    while True:
 
        ok = raw_input(prompt)
 
        if ok in ('y', 'ye', 'yes'):
 
            return True
 
        if ok in ('n', 'no', 'nop', 'nope'):
 
            return False
 
        retries = retries - 1
 
        if retries < 0:
 
            raise IOError
 
        print complaint
 
        print(complaint)
kallithea/lib/vcs/utils/progressbar.py
Show inline comments
 
# encoding: UTF-8
 

	
 
from __future__ import print_function
 

	
 
import datetime
 
import string
 
import sys
 

	
 
from kallithea.lib.vcs.utils.filesize import filesizeformat
 

	
 

	
 
class ProgressBarError(Exception):
 
    pass
 

	
 

	
 
class AlreadyFinishedError(ProgressBarError):
 
    pass
 

	
 

	
 
class ProgressBar(object):
 

	
 
    default_elements = ['percentage', 'bar', 'steps']
 

	
 
    def __init__(self, steps=100, stream=None, elements=None):
 
        self.step = 0
 
        self.steps = steps
 
        self.stream = stream or sys.stderr
 
        self.bar_char = '='
 
        self.width = 50
 
        self.separator = ' | '
 
        self.elements = elements or self.default_elements
 
        self.started = None
 
        self.finished = False
 
        self.steps_label = 'Step'
 
        self.time_label = 'Time'
 
        self.eta_label = 'ETA'
 
        self.speed_label = 'Speed'
 
        self.transfer_label = 'Transfer'
 

	
 
    def __str__(self):
 
        return self.get_line()
 

	
 
    def __iter__(self):
 
        start = self.step
 
        end = self.steps + 1
 
        for x in xrange(start, end):
 
            self.render(x)
 
            yield x
 

	
 
    def get_separator(self):
 
        return self.separator
 

	
 
    def get_bar_char(self):
 
        return self.bar_char
 

	
 
    def get_bar(self):
 
        char = self.get_bar_char()
 
        perc = self.get_percentage()
 
        length = int(self.width * perc / 100)
 
        bar = char * length
 
        bar = bar.ljust(self.width)
 
        return bar
 

	
 
    def get_elements(self):
 
        return self.elements
 

	
 
    def get_template(self):
 
        separator = self.get_separator()
 
        elements = self.get_elements()
 
        return string.Template(separator.join((('$%s' % e) for e in elements)))
 

	
 
    def get_total_time(self, current_time=None):
 
        if current_time is None:
 
            current_time = datetime.datetime.now()
 
        if not self.started:
 
            return datetime.timedelta()
 
        return current_time - self.started
 

	
 
    def get_rendered_total_time(self):
 
        delta = self.get_total_time()
 
        if not delta:
 
            ttime = '-'
 
        else:
 
            ttime = str(delta)
 
        return '%s %s' % (self.time_label, ttime)
 

	
 
    def get_eta(self, current_time=None):
 
        if current_time is None:
 
            current_time = datetime.datetime.now()
 
        if self.step == 0:
 
            return datetime.timedelta()
 
        total_seconds = self.get_total_time().total_seconds()
 
        eta_seconds = total_seconds * self.steps / self.step - total_seconds
 
        return datetime.timedelta(seconds=int(eta_seconds))
 

	
 
    def get_rendered_eta(self):
 
        eta = self.get_eta()
 
        if not eta:
 
            eta = '--:--:--'
 
        else:
 
            eta = str(eta).rjust(8)
 
        return '%s: %s' % (self.eta_label, eta)
 

	
 
    def get_percentage(self):
 
        return float(self.step) / self.steps * 100
 

	
 
    def get_rendered_percentage(self):
 
        perc = self.get_percentage()
 
        return ('%s%%' % (int(perc))).rjust(5)
 

	
 
    def get_rendered_steps(self):
 
        return '%s: %s/%s' % (self.steps_label, self.step, self.steps)
 

	
 
    def get_rendered_speed(self, step=None, total_seconds=None):
 
        if step is None:
 
            step = self.step
 
        if total_seconds is None:
 
            total_seconds = self.get_total_time().total_seconds()
 
        if step <= 0 or total_seconds <= 0:
 
            speed = '-'
 
        else:
 
            speed = filesizeformat(float(step) / total_seconds)
 
        return '%s: %s/s' % (self.speed_label, speed)
 

	
 
    def get_rendered_transfer(self, step=None, steps=None):
 
        if step is None:
 
            step = self.step
 
        if steps is None:
 
            steps = self.steps
 

	
 
        if steps <= 0:
 
            return '%s: -' % self.transfer_label
 
        total = filesizeformat(float(steps))
 
        if step <= 0:
 
            transferred = '-'
 
        else:
 
            transferred = filesizeformat(float(step))
 
        return '%s: %s / %s' % (self.transfer_label, transferred, total)
 

	
 
    def get_context(self):
 
        return {
 
            'percentage': self.get_rendered_percentage(),
 
            'bar': self.get_bar(),
 
            'steps': self.get_rendered_steps(),
 
            'time': self.get_rendered_total_time(),
 
            'eta': self.get_rendered_eta(),
 
            'speed': self.get_rendered_speed(),
 
            'transfer': self.get_rendered_transfer(),
 
        }
 

	
 
    def get_line(self):
 
        template = self.get_template()
 
        context = self.get_context()
 
        return template.safe_substitute(**context)
 

	
 
    def write(self, data):
 
        self.stream.write(data)
 

	
 
    def render(self, step):
 
        if not self.started:
 
            self.started = datetime.datetime.now()
 
        if self.finished:
 
            raise AlreadyFinishedError
 
        self.step = step
 
        self.write('\r%s' % self)
 
        if step == self.steps:
 
            self.finished = True
 
        if step == self.steps:
 
            self.write('\n')
 

	
 

	
 
"""
 
termcolors.py
 

	
 
Grabbed from Django (http://www.djangoproject.com)
 
"""
 

	
 
color_names = ('black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white')
 
foreground = dict([(color_names[x], '3%s' % x) for x in range(8)])
 
background = dict([(color_names[x], '4%s' % x) for x in range(8)])
 

	
 
RESET = '0'
 
opt_dict = {'bold': '1', 'underscore': '4', 'blink': '5', 'reverse': '7', 'conceal': '8'}
 

	
 

	
 
def colorize(text='', opts=(), **kwargs):
 
    """
 
    Returns your text, enclosed in ANSI graphics codes.
 

	
 
    Depends on the keyword arguments 'fg' and 'bg', and the contents of
 
    the opts tuple/list.
 

	
 
    Returns the RESET code if no parameters are given.
 

	
 
    Valid colors:
 
        'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'
 

	
 
    Valid options:
 
        'bold'
 
        'underscore'
 
        'blink'
 
        'reverse'
 
        'conceal'
 
        'noreset' - string will not be auto-terminated with the RESET code
 

	
 
    Examples:
 
        colorize('hello', fg='red', bg='blue', opts=('blink',))
 
        colorize()
 
        colorize('goodbye', opts=('underscore',))
 
        print colorize('first line', fg='red', opts=('noreset',))
 
        print 'this should be red too'
 
        print colorize('and so should this')
 
        print 'this should not be red'
 
    """
 
    code_list = []
 
    if text == '' and len(opts) == 1 and opts[0] == 'reset':
 
        return '\x1b[%sm' % RESET
 
    for k, v in kwargs.iteritems():
 
        if k == 'fg':
 
            code_list.append(foreground[v])
 
        elif k == 'bg':
 
            code_list.append(background[v])
 
    for o in opts:
 
        if o in opt_dict:
 
            code_list.append(opt_dict[o])
 
    if 'noreset' not in opts:
 
        text = text + '\x1b[%sm' % RESET
 
    return ('\x1b[%sm' % ';'.join(code_list)) + text
 

	
 

	
 
def make_style(opts=(), **kwargs):
 
    """
 
    Returns a function with default parameters for colorize()
 

	
 
    Example:
 
        bold_red = make_style(opts=('bold',), fg='red')
 
        print bold_red('hello')
 
        KEYWORD = make_style(fg='yellow')
 
        COMMENT = make_style(fg='blue', opts=('bold',))
 
    """
 
    return lambda text: colorize(text, opts, **kwargs)
 

	
 

	
 
NOCOLOR_PALETTE = 'nocolor'
 
DARK_PALETTE = 'dark'
 
LIGHT_PALETTE = 'light'
 

	
 
PALETTES = {
 
    NOCOLOR_PALETTE: {
 
        'ERROR':        {},
 
        'NOTICE':       {},
 
        'SQL_FIELD':    {},
 
        'SQL_COLTYPE':  {},
 
        'SQL_KEYWORD':  {},
 
        'SQL_TABLE':    {},
 
        'HTTP_INFO':         {},
 
        'HTTP_SUCCESS':      {},
 
        'HTTP_REDIRECT':     {},
 
        'HTTP_NOT_MODIFIED': {},
 
        'HTTP_BAD_REQUEST':  {},
 
        'HTTP_NOT_FOUND':    {},
 
        'HTTP_SERVER_ERROR': {},
 
    },
 
    DARK_PALETTE: {
 
        'ERROR':        { 'fg': 'red', 'opts': ('bold',) },
 
        'NOTICE':       { 'fg': 'red' },
 
        'SQL_FIELD':    { 'fg': 'green', 'opts': ('bold',) },
 
        'SQL_COLTYPE':  { 'fg': 'green' },
 
        'SQL_KEYWORD':  { 'fg': 'yellow' },
 
        'SQL_TABLE':    { 'opts': ('bold',) },
 
        'HTTP_INFO':         { 'opts': ('bold',) },
 
        'HTTP_SUCCESS':      { },
 
        'HTTP_REDIRECT':     { 'fg': 'green' },
 
        'HTTP_NOT_MODIFIED': { 'fg': 'cyan' },
 
        'HTTP_BAD_REQUEST':  { 'fg': 'red', 'opts': ('bold',) },
 
        'HTTP_NOT_FOUND':    { 'fg': 'yellow' },
 
        'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) },
 
    },
 
    LIGHT_PALETTE: {
 
        'ERROR':        { 'fg': 'red', 'opts': ('bold',) },
 
        'NOTICE':       { 'fg': 'red' },
 
        'SQL_FIELD':    { 'fg': 'green', 'opts': ('bold',) },
 
        'SQL_COLTYPE':  { 'fg': 'green' },
 
        'SQL_KEYWORD':  { 'fg': 'blue' },
 
        'SQL_TABLE':    { 'opts': ('bold',) },
 
        'HTTP_INFO':         { 'opts': ('bold',) },
 
        'HTTP_SUCCESS':      { },
 
        'HTTP_REDIRECT':     { 'fg': 'green', 'opts': ('bold',) },
 
        'HTTP_NOT_MODIFIED': { 'fg': 'green' },
 
        'HTTP_BAD_REQUEST':  { 'fg': 'red', 'opts': ('bold',) },
 
        'HTTP_NOT_FOUND':    { 'fg': 'red' },
 
        'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) },
 
    }
 
}
 
DEFAULT_PALETTE = DARK_PALETTE
 

	
 
# ---------------------------- #
 
# --- End of termcolors.py --- #
 
# ---------------------------- #
 

	
 

	
 
class ColoredProgressBar(ProgressBar):
 

	
 
    BAR_COLORS = (
 
        (10, 'red'),
 
        (30, 'magenta'),
 
        (50, 'yellow'),
 
        (99, 'green'),
 
        (100, 'blue'),
 
    )
 

	
 
    def get_line(self):
 
        line = super(ColoredProgressBar, self).get_line()
 
        perc = self.get_percentage()
 
        if perc > 100:
 
            color = 'blue'
 
        for max_perc, color in self.BAR_COLORS:
 
            if perc <= max_perc:
 
                break
 
        return colorize(line, fg=color)
 

	
 

	
 
class AnimatedProgressBar(ProgressBar):
 

	
 
    def get_bar_char(self):
 
        chars = '-/|\\'
 
        if self.step >= self.steps:
 
            return '='
 
        return chars[self.step % len(chars)]
 

	
 

	
 
class BarOnlyProgressBar(ProgressBar):
 

	
 
    default_elements = ['bar', 'steps']
 

	
 
    def get_bar(self):
 
        bar = super(BarOnlyProgressBar, self).get_bar()
 
        perc = self.get_percentage()
 
        perc_text = '%s%%' % int(perc)
 
        text = (' %s%% ' % (perc_text)).center(self.width, '=')
 
        L = text.find(' ')
 
        R = text.rfind(' ')
 
        bar = ' '.join((bar[:L], perc_text, bar[R:]))
 
        return bar
 

	
 

	
 
class AnimatedColoredProgressBar(AnimatedProgressBar,
 
                                 ColoredProgressBar):
 
    pass
 

	
 

	
 
class BarOnlyColoredProgressBar(ColoredProgressBar,
 
                                BarOnlyProgressBar):
 
    pass
 

	
 

	
 
def main():
 
    import time
 

	
 
    print "Standard progress bar..."
 
    print("Standard progress bar...")
 
    bar = ProgressBar(30)
 
    for x in xrange(1, 31):
 
        bar.render(x)
 
        time.sleep(0.02)
 
    bar.stream.write('\n')
 
    print
 
    print()
 

	
 
    print "Empty bar..."
 
    print("Empty bar...")
 
    bar = ProgressBar(50)
 
    bar.render(0)
 
    print
 
    print
 
    print()
 
    print()
 

	
 
    print "Colored bar..."
 
    print("Colored bar...")
 
    bar = ColoredProgressBar(20)
 
    for x in bar:
 
        time.sleep(0.01)
 
    print
 
    print()
 

	
 
    print "Animated char bar..."
 
    print("Animated char bar...")
 
    bar = AnimatedProgressBar(20)
 
    for x in bar:
 
        time.sleep(0.01)
 
    print
 
    print()
 

	
 
    print "Animated + colored char bar..."
 
    print("Animated + colored char bar...")
 
    bar = AnimatedColoredProgressBar(20)
 
    for x in bar:
 
        time.sleep(0.01)
 
    print
 
    print()
 

	
 
    print "Bar only ..."
 
    print("Bar only ...")
 
    bar = BarOnlyProgressBar(20)
 
    for x in bar:
 
        time.sleep(0.01)
 
    print
 
    print()
 

	
 
    print "Colored, longer bar-only, eta, total time ..."
 
    print("Colored, longer bar-only, eta, total time ...")
 
    bar = BarOnlyColoredProgressBar(40)
 
    bar.width = 60
 
    bar.elements += ['time', 'eta']
 
    for x in bar:
 
        time.sleep(0.01)
 
    print
 
    print
 
    print()
 
    print()
 

	
 
    print "File transfer bar, breaks after 2 seconds ..."
 
    print("File transfer bar, breaks after 2 seconds ...")
 
    total_bytes = 1024 * 1024 * 2
 
    bar = ProgressBar(total_bytes)
 
    bar.width = 50
 
    bar.elements.remove('steps')
 
    bar.elements += ['transfer', 'time', 'eta', 'speed']
 
    for x in xrange(0, bar.steps, 1024):
 
        bar.render(x)
 
        time.sleep(0.01)
 
        now = datetime.datetime.now()
 
        if now - bar.started >= datetime.timedelta(seconds=2):
 
            break
 
    print
 
    print
 
    print()
 
    print()
 

	
 

	
 
if __name__ == '__main__':
 
    main()
kallithea/tests/other/test_vcs_operations.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
Test suite for vcs push/pull operations.
 

	
 
The tests need Git > 1.8.1.
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Dec 30, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 

	
 
"""
 

	
 
from __future__ import print_function
 

	
 
import json
 
import os
 
import re
 
import tempfile
 
import time
 
import urllib2
 
from subprocess import PIPE, Popen
 
from tempfile import _RandomNameSequence
 

	
 
import pytest
 

	
 
from kallithea import CONFIG
 
from kallithea.model.db import CacheInvalidation, Repository, Ui, User, UserIpMap, UserLog
 
from kallithea.model.meta import Session
 
from kallithea.model.ssh_key import SshKeyModel
 
from kallithea.model.user import UserModel
 
from kallithea.tests.base import *
 
from kallithea.tests.fixture import Fixture
 

	
 

	
 
DEBUG = True
 
HOST = '127.0.0.1:4999'  # test host
 

	
 
fixture = Fixture()
 

	
 

	
 
# Parameterize different kinds of VCS testing - both the kind of VCS and the
 
# access method (HTTP/SSH)
 

	
 
# Mixin for using HTTP and SSH URLs
 
class HttpVcsTest(object):
 
    @staticmethod
 
    def repo_url_param(webserver, repo_name, **kwargs):
 
        return webserver.repo_url(repo_name, **kwargs)
 

	
 
class SshVcsTest(object):
 
    public_keys = {
 
        TEST_USER_REGULAR_LOGIN: u'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUQ== kallithea@localhost',
 
        TEST_USER_ADMIN_LOGIN: u'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUq== kallithea@localhost',
 
    }
 

	
 
    @classmethod
 
    def repo_url_param(cls, webserver, repo_name, username=TEST_USER_ADMIN_LOGIN, password=TEST_USER_ADMIN_PASS, client_ip=IP_ADDR):
 
        user = User.get_by_username(username)
 
        if user.ssh_keys:
 
            ssh_key = user.ssh_keys[0]
 
        else:
 
            sshkeymodel = SshKeyModel()
 
            ssh_key = sshkeymodel.create(user, u'test key', cls.public_keys[user.username])
 
            Session().commit()
 

	
 
        return cls._ssh_param(repo_name, user, ssh_key, client_ip)
 

	
 
# Mixins for using Mercurial and Git
 
class HgVcsTest(object):
 
    repo_type = 'hg'
 
    repo_name = HG_REPO
 

	
 
class GitVcsTest(object):
 
    repo_type = 'git'
 
    repo_name = GIT_REPO
 

	
 
# Combine mixins to give the combinations we want to parameterize tests with
 
class HgHttpVcsTest(HgVcsTest, HttpVcsTest):
 
    pass
 

	
 
class GitHttpVcsTest(GitVcsTest, HttpVcsTest):
 
    pass
 

	
 
class HgSshVcsTest(HgVcsTest, SshVcsTest):
 
    @staticmethod
 
    def _ssh_param(repo_name, user, ssh_key, client_ip):
 
        # Specify a custom ssh command on the command line
 
        return r"""--config ui.ssh="bash -c 'SSH_ORIGINAL_COMMAND=\"\$2\" SSH_CONNECTION=\"%s 1024 127.0.0.1 22\" kallithea-cli ssh-serve -c %s %s %s' --" ssh://someuser@somehost/%s""" % (
 
            client_ip,
 
            CONFIG['__file__'],
 
            user.user_id,
 
            ssh_key.user_ssh_key_id,
 
            repo_name)
 

	
 
class GitSshVcsTest(GitVcsTest, SshVcsTest):
 
    @staticmethod
 
    def _ssh_param(repo_name, user, ssh_key, client_ip):
 
        # Set a custom ssh command in the global environment
 
        os.environ['GIT_SSH_COMMAND'] = r"""bash -c 'SSH_ORIGINAL_COMMAND="$2" SSH_CONNECTION="%s 1024 127.0.0.1 22" kallithea-cli ssh-serve -c %s %s %s' --""" % (
 
            client_ip,
 
            CONFIG['__file__'],
 
            user.user_id,
 
            ssh_key.user_ssh_key_id)
 
        return "ssh://someuser@somehost/%s""" % repo_name
 

	
 
parametrize_vcs_test = parametrize('vt', [
 
    HgHttpVcsTest,
 
    GitHttpVcsTest,
 
    HgSshVcsTest,
 
    GitSshVcsTest,
 
])
 
parametrize_vcs_test_hg = parametrize('vt', [
 
    HgHttpVcsTest,
 
    HgSshVcsTest,
 
])
 
parametrize_vcs_test_http = parametrize('vt', [
 
    HgHttpVcsTest,
 
    GitHttpVcsTest,
 
])
 

	
 
class Command(object):
 

	
 
    def __init__(self, cwd):
 
        self.cwd = cwd
 

	
 
    def execute(self, *args, **environ):
 
        """
 
        Runs command on the system with given ``args`` using simple space
 
        join without safe quoting.
 
        """
 
        command = ' '.join(args)
 
        ignoreReturnCode = environ.pop('ignoreReturnCode', False)
 
        if DEBUG:
 
            print '*** CMD %s ***' % command
 
            print('*** CMD %s ***' % command)
 
        testenv = dict(os.environ)
 
        testenv['LANG'] = 'en_US.UTF-8'
 
        testenv['LANGUAGE'] = 'en_US:en'
 
        testenv['HGPLAIN'] = ''
 
        testenv['HGRCPATH'] = ''
 
        testenv.update(environ)
 
        p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd, env=testenv)
 
        stdout, stderr = p.communicate()
 
        if DEBUG:
 
            if stdout:
 
                print 'stdout:', stdout
 
                print('stdout:', stdout)
 
            if stderr:
 
                print 'stderr:', stderr
 
                print('stderr:', stderr)
 
        if not ignoreReturnCode:
 
            assert p.returncode == 0
 
        return stdout, stderr
 

	
 

	
 
def _get_tmp_dir(prefix='vcs_operations-', suffix=''):
 
    return tempfile.mkdtemp(dir=TESTS_TMP_PATH, prefix=prefix, suffix=suffix)
 

	
 

	
 
def _add_files(vcs, dest_dir, files_no=3):
 
    """
 
    Generate some files, add them to the dest_dir repo and commit them.
 
    vcs is 'git' or 'hg' and defines which VCS the files are created for.
 

	
 
    :param vcs:
 
    :param dest_dir:
 
    """
 
    added_file = '%ssetup.py' % _RandomNameSequence().next()
 
    open(os.path.join(dest_dir, added_file), 'a').close()
 
    Command(dest_dir).execute(vcs, 'add', added_file)
 

	
 
    email = 'me@example.com'
 
    if os.name == 'nt':
 
        author_str = 'User <%s>' % email
 
    else:
 
        author_str = 'User ǝɯɐᴎ <%s>' % email
 
    for i in xrange(files_no):
 
        cmd = """echo "added_line%s" >> %s""" % (i, added_file)
 
        Command(dest_dir).execute(cmd)
 
        if vcs == 'hg':
 
            cmd = """hg commit -m "committed new %s" -u "%s" "%s" """ % (
 
                i, author_str, added_file
 
            )
 
        elif vcs == 'git':
 
            cmd = """git commit -m "committed new %s" --author "%s" "%s" """ % (
 
                i, author_str, added_file
 
            )
 
        # git commit needs EMAIL on some machines
 
        Command(dest_dir).execute(cmd, EMAIL=email)
 

	
 
def _add_files_and_push(webserver, vt, dest_dir, clone_url, ignoreReturnCode=False, files_no=3):
 
    _add_files(vt.repo_type, dest_dir, files_no=files_no)
 
    # PUSH it back
 
    stdout = stderr = None
 
    if vt.repo_type == 'hg':
 
        stdout, stderr = Command(dest_dir).execute('hg push -f --verbose', clone_url, ignoreReturnCode=ignoreReturnCode)
 
    elif vt.repo_type == 'git':
 
        stdout, stderr = Command(dest_dir).execute('git push -f --verbose', clone_url, "master", ignoreReturnCode=ignoreReturnCode)
 

	
 
    return stdout, stderr
 

	
 

	
 
def _check_outgoing(vcs, cwd, clone_url):
 
    if vcs == 'hg':
 
        # hg removes the password from default URLs, so we have to provide it here via the clone_url
 
        return Command(cwd).execute('hg -q outgoing', clone_url, ignoreReturnCode=True)
 
    elif vcs == 'git':
 
        Command(cwd).execute('git remote update')
 
        return Command(cwd).execute('git log origin/master..master')
 

	
 

	
 
def set_anonymous_access(enable=True):
 
    user = User.get_default_user()
 
    user.active = enable
 
    Session().commit()
 
    if enable != User.get_default_user().active:
 
        raise Exception('Cannot set anonymous access')
 

	
 

	
 
#==============================================================================
 
# TESTS
 
#==============================================================================
 

	
 

	
 
def _check_proper_git_push(stdout, stderr):
 
    assert 'fatal' not in stderr
 
    assert 'rejected' not in stderr
 
    assert 'Pushing to' in stderr
 
    assert 'master -> master' in stderr
 

	
 

	
 
@pytest.mark.usefixtures("test_context_fixture")
 
class TestVCSOperations(TestController):
 

	
 
    @classmethod
 
    def setup_class(cls):
 
        # DISABLE ANONYMOUS ACCESS
 
        set_anonymous_access(False)
 

	
 
    @pytest.fixture()
 
    def testhook_cleanup(self):
 
        yield
 
        # remove hook
 
        for hook in ['prechangegroup', 'pretxnchangegroup', 'preoutgoing', 'changegroup', 'outgoing', 'incoming']:
 
            entry = Ui.get_by_key('hooks', '%s.testhook' % hook)
 
            if entry:
 
                Session().delete(entry)
 
        Session().commit()
 

	
 
    @pytest.fixture(scope="module")
 
    def testfork(self):
 
        # create fork so the repo stays untouched
 
        git_fork_name = u'%s_fork%s' % (GIT_REPO, _RandomNameSequence().next())
 
        fixture.create_fork(GIT_REPO, git_fork_name)
 
        hg_fork_name = u'%s_fork%s' % (HG_REPO, _RandomNameSequence().next())
 
        fixture.create_fork(HG_REPO, hg_fork_name)
 
        return {'git': git_fork_name, 'hg': hg_fork_name}
 

	
 
    @parametrize_vcs_test
 
    def test_clone_repo_by_admin(self, webserver, vt):
 
        clone_url = vt.repo_url_param(webserver, vt.repo_name)
 
        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir())
 

	
 
        if vt.repo_type == 'git':
 
            assert 'Cloning into' in stdout + stderr
 
            assert stderr == '' or stdout == ''
 
        elif vt.repo_type == 'hg':
 
            assert 'requesting all changes' in stdout
 
            assert 'adding changesets' in stdout
 
            assert 'adding manifests' in stdout
 
            assert 'adding file changes' in stdout
 
            assert stderr == ''
 

	
 
    @parametrize_vcs_test_http
 
    def test_clone_wrong_credentials(self, webserver, vt):
 
        clone_url = vt.repo_url_param(webserver, vt.repo_name, password='bad!')
 
        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
 
        if vt.repo_type == 'git':
 
            assert 'fatal: Authentication failed' in stderr
 
        elif vt.repo_type == 'hg':
 
            assert 'abort: authorization failed' in stderr
 

	
 
    def test_clone_git_dir_as_hg(self, webserver):
 
        clone_url = HgHttpVcsTest.repo_url_param(webserver, GIT_REPO)
 
        stdout, stderr = Command(TESTS_TMP_PATH).execute('hg clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
 
        assert 'HTTP Error 404: Not Found' in stderr or "not a valid repository" in stdout and 'abort:' in stderr
 

	
 
    def test_clone_hg_repo_as_git(self, webserver):
 
        clone_url = GitHttpVcsTest.repo_url_param(webserver, HG_REPO)
 
        stdout, stderr = Command(TESTS_TMP_PATH).execute('git clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
 
        assert 'not found' in stderr
 

	
 
    @parametrize_vcs_test
 
    def test_clone_non_existing_path(self, webserver, vt):
 
        clone_url = vt.repo_url_param(webserver, 'trololo')
 
        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
 
        if vt.repo_type == 'git':
 
            assert 'not found' in stderr or 'abort: Access to %r denied' % 'trololo' in stderr
 
        elif vt.repo_type == 'hg':
 
            assert 'HTTP Error 404: Not Found' in stderr or 'abort: no suitable response from remote hg' in stderr and 'remote: abort: Access to %r denied' % 'trololo' in stdout
 

	
 
    @parametrize_vcs_test
 
    def test_push_new_repo(self, webserver, vt):
 
        # Clear the log so we know what is added
 
        UserLog.query().delete()
 
        Session().commit()
 

	
 
        # Create an empty server repo using the API
 
        repo_name = u'new_%s_%s' % (vt.repo_type, _RandomNameSequence().next())
 
        usr = User.get_by_username(TEST_USER_ADMIN_LOGIN)
 
        params = {
 
            "id": 7,
 
            "api_key": usr.api_key,
 
            "method": 'create_repo',
 
            "args": dict(repo_name=repo_name,
 
                         owner=TEST_USER_ADMIN_LOGIN,
 
                         repo_type=vt.repo_type),
 
        }
 
        req = urllib2.Request(
 
            'http://%s:%s/_admin/api' % webserver.server_address,
 
            data=json.dumps(params),
 
            headers={'content-type': 'application/json'})
 
        response = urllib2.urlopen(req)
 
        result = json.loads(response.read())
 
        # Expect something like:
 
        # {u'result': {u'msg': u'Created new repository `new_XXX`', u'task': None, u'success': True}, u'id': 7, u'error': None}
 
        assert result[u'result'][u'success']
 

	
 
        # Create local clone of the empty server repo
 
        local_clone_dir = _get_tmp_dir()
 
        clone_url = vt.repo_url_param(webserver, repo_name)
 
        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, local_clone_dir)
 

	
 
        # Make 3 commits and push to the empty server repo.
 
        # The server repo doesn't have any other heads than the
 
        # refs/heads/master we are pushing, but the `git log` in the push hook
 
        # should still list the 3 commits.
 
        stdout, stderr = _add_files_and_push(webserver, vt, local_clone_dir, clone_url=clone_url)
 
        if vt.repo_type == 'git':
 
            _check_proper_git_push(stdout, stderr)
 
        elif vt.repo_type == 'hg':
 
            assert 'pushing to ' in stdout
 
            assert 'remote: added ' in stdout
 

	
 
        # Verify that we got the right events in UserLog. Expect something like:
 
        # <UserLog('id:new_git_XXX:started_following_repo')>
 
        # <UserLog('id:new_git_XXX:user_created_repo')>
 
        # <UserLog('id:new_git_XXX:pull')>
 
        # <UserLog('id:new_git_XXX:push:aed9d4c1732a1927da3be42c47eb9afdc200d427,d38b083a07af10a9f44193486959a96a23db78da,4841ff9a2b385bec995f4679ef649adb3f437622')>
 
        action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)]
 
        assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == ([
 
            (u'started_following_repo', 0),
 
            (u'user_created_repo', 0),
 
            (u'pull', 0),
 
            (u'push', 3)]
 
            if vt.repo_type == 'git' else [
 
            (u'started_following_repo', 0),
 
            (u'user_created_repo', 0),
 
            # (u'pull', 0), # Mercurial outgoing hook is not called for empty clones
 
            (u'push', 3)])
 

	
 
    @parametrize_vcs_test
 
    def test_push_new_file(self, webserver, testfork, vt):
 
        UserLog.query().delete()
 
        Session().commit()
 

	
 
        dest_dir = _get_tmp_dir()
 
        clone_url = vt.repo_url_param(webserver, vt.repo_name)
 
        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
 

	
 
        clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type])
 
        stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, clone_url=clone_url)
 

	
 
        if vt.repo_type == 'git':
 
            _check_proper_git_push(stdout, stderr)
 
        elif vt.repo_type == 'hg':
 
            assert 'pushing to' in stdout
 
            assert 'Repository size' in stdout
 
            assert 'Last revision is now' in stdout
 

	
 
        action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)]
 
        assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == \
 
            [(u'pull', 0), (u'push', 3)]
 

	
 
    @parametrize_vcs_test
 
    def test_pull(self, webserver, testfork, vt):
 
        UserLog.query().delete()
 
        Session().commit()
 

	
 
        dest_dir = _get_tmp_dir()
 
        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'init', dest_dir)
 

	
 
        clone_url = vt.repo_url_param(webserver, vt.repo_name)
 
        stdout, stderr = Command(dest_dir).execute(vt.repo_type, 'pull', clone_url)
 

	
 
        if vt.repo_type == 'git':
 
            assert 'FETCH_HEAD' in stderr
 
        elif vt.repo_type == 'hg':
 
            assert 'new changesets' in stdout
 

	
 
        action_parts = [ul.action for ul in UserLog.query().order_by(UserLog.user_log_id)]
 
        assert action_parts == [u'pull']
 

	
 
    @parametrize_vcs_test
 
    def test_push_invalidates_cache(self, webserver, testfork, vt):
 
        pre_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in Repository.query().filter(Repository.repo_name == testfork[vt.repo_type])]
 

	
 
        key = CacheInvalidation.query().filter(CacheInvalidation.cache_key
 
                                               == testfork[vt.repo_type]).scalar()
 
        if not key:
 
            key = CacheInvalidation(testfork[vt.repo_type], testfork[vt.repo_type])
 
            Session().add(key)
 

	
 
        key.cache_active = True
 
        Session().commit()
 

	
 
        dest_dir = _get_tmp_dir()
 
        clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type])
 
        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
 

	
 
        stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, files_no=1, clone_url=clone_url)
 

	
 
        if vt.repo_type == 'git':
 
            _check_proper_git_push(stdout, stderr)
 

	
 
        post_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in Repository.query().filter(Repository.repo_name == testfork[vt.repo_type])]
 
        assert pre_cached_tip != post_cached_tip
 

	
 
        key = CacheInvalidation.query().filter(CacheInvalidation.cache_key
 
                                               == testfork[vt.repo_type]).all()
 
        assert key == []
 

	
 
    @parametrize_vcs_test_http
 
    def test_push_wrong_credentials(self, webserver, vt):
 
        dest_dir = _get_tmp_dir()
 
        clone_url = vt.repo_url_param(webserver, vt.repo_name)
 
        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
 

	
 
        clone_url = webserver.repo_url(vt.repo_name, username='bad', password='name')
 
        stdout, stderr = _add_files_and_push(webserver, vt, dest_dir,
 
                                             clone_url=clone_url, ignoreReturnCode=True)
 

	
 
        if vt.repo_type == 'git':
 
            assert 'fatal: Authentication failed' in stderr
 
        elif vt.repo_type == 'hg':
 
            assert 'abort: authorization failed' in stderr
 

	
 
    @parametrize_vcs_test
 
    def test_push_with_readonly_credentials(self, webserver, vt):
 
        UserLog.query().delete()
 
        Session().commit()
 

	
 
        dest_dir = _get_tmp_dir()
 
        clone_url = vt.repo_url_param(webserver, vt.repo_name, username=TEST_USER_REGULAR_LOGIN, password=TEST_USER_REGULAR_PASS)
 
        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
 

	
 
        stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, ignoreReturnCode=True, clone_url=clone_url)
 

	
 
        if vt.repo_type == 'git':
 
            assert 'The requested URL returned error: 403' in stderr or 'abort: Push access to %r denied' % str(vt.repo_name) in stderr
 
        elif vt.repo_type == 'hg':
 
            assert 'abort: HTTP Error 403: Forbidden' in stderr or 'abort: push failed on remote' in stderr and 'remote: Push access to %r denied' % str(vt.repo_name) in stdout
 

	
 
        action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)]
 
        assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == \
 
            [(u'pull', 0)]
 

	
 
    @parametrize_vcs_test
 
    def test_push_back_to_wrong_url(self, webserver, vt):
 
        dest_dir = _get_tmp_dir()
 
        clone_url = vt.repo_url_param(webserver, vt.repo_name)
 
        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)
 

	
 
        stdout, stderr = _add_files_and_push(
 
            webserver, vt, dest_dir, clone_url='http://%s:%s/tmp' % (
 
                webserver.server_address[0], webserver.server_address[1]),
 
            ignoreReturnCode=True)
 

	
 
        if vt.repo_type == 'git':
 
            assert 'not found' in stderr
 
        elif vt.repo_type == 'hg':
 
            assert 'HTTP Error 404: Not Found' in stderr
 

	
 
    @parametrize_vcs_test
 
    def test_ip_restriction(self, webserver, vt):
 
        user_model = UserModel()
 
        try:
 
            # Add IP constraint that excludes the test context:
 
            user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
 
            Session().commit()
 
            # IP permissions are cached, need to wait for the cache in the server process to expire
 
            time.sleep(1.5)
 
            clone_url = vt.repo_url_param(webserver, vt.repo_name)
 
            stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
 
            if vt.repo_type == 'git':
 
                # The message apparently changed in Git 1.8.3, so match it loosely.
 
                assert re.search(r'\b403\b', stderr) or 'abort: User test_admin from 127.0.0.127 cannot be authorized' in stderr
 
            elif vt.repo_type == 'hg':
 
                assert 'abort: HTTP Error 403: Forbidden' in stderr or 'remote: abort: User test_admin from 127.0.0.127 cannot be authorized' in stdout
 
        finally:
 
            # release IP restrictions
 
            for ip in UserIpMap.query():
 
                UserIpMap.delete(ip.ip_id)
 
            Session().commit()
 
            # IP permissions are cached, need to wait for the cache in the server process to expire
 
            time.sleep(1.5)
 

	
 
        clone_url = vt.repo_url_param(webserver, vt.repo_name)
 
        stdout, stderr = Command(TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir())
 

	
 
        if vt.repo_type == 'git':
 
            assert 'Cloning into' in stdout + stderr
 
            assert stderr == '' or stdout == ''
 
        elif vt.repo_type == 'hg':
 
            assert 'requesting all changes' in stdout
 
            assert 'adding changesets' in stdout
 
            assert 'adding manifests' in stdout
 
            assert 'adding file changes' in stdout
 

	
 
            assert stderr == ''
 

	
 
    @parametrize_vcs_test_hg # git hooks don't work like hg hooks
 
    def test_custom_hooks_preoutgoing(self, testhook_cleanup, webserver, testfork, vt):
 
        # set preoutgoing to failing hook (returns True)
 
        Ui.create_or_update_hook('preoutgoing.testhook', 'python:kallithea.tests.fixture.failing_test_hook')
 
        Session().commit()
 
        # clone repo
 
        clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type], username=TEST_USER_ADMIN_LOGIN, password=TEST_USER_ADMIN_PASS)
 
        dest_dir = _get_tmp_dir()
 
        stdout, stderr = Command(TESTS_TMP_PATH) \
 
            .execute(vt.repo_type, 'clone', clone_url, dest_dir, ignoreReturnCode=True)
 
        if vt.repo_type == 'hg':
 
            assert 'preoutgoing.testhook hook failed' in stdout
 
        elif vt.repo_type == 'git':
kallithea/tests/scripts/manual_test_concurrency.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.tests.scripts.manual_test_concurrency
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Test suite for push/pull operations
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Dec 30, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 

	
 
"""
 

	
 
from __future__ import print_function
 

	
 
import logging
 
import os
 
import shutil
 
import sys
 
import tempfile
 
from os.path import dirname
 
from subprocess import PIPE, Popen
 

	
 
from paste.deploy import appconfig
 
from sqlalchemy import engine_from_config
 

	
 
from kallithea.config.environment import load_environment
 
from kallithea.lib.auth import get_crypt_password
 
from kallithea.lib.utils import setup_cache_regions
 
from kallithea.model import meta
 
from kallithea.model.base import init_model
 
from kallithea.model.db import Repository, Ui, User
 
from kallithea.tests.base import HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS
 

	
 

	
 
rel_path = dirname(dirname(dirname(dirname(os.path.abspath(__file__)))))
 
conf = appconfig('config:development.ini', relative_to=rel_path)
 
load_environment(conf.global_conf, conf.local_conf)
 

	
 
setup_cache_regions(conf)
 

	
 
USER = TEST_USER_ADMIN_LOGIN
 
PASS = TEST_USER_ADMIN_PASS
 
HOST = 'server.local'
 
METHOD = 'pull'
 
DEBUG = True
 
log = logging.getLogger(__name__)
 

	
 

	
 
class Command(object):
 

	
 
    def __init__(self, cwd):
 
        self.cwd = cwd
 

	
 
    def execute(self, cmd, *args):
 
        """Runs command on the system with given ``args``.
 
        """
 

	
 
        command = cmd + ' ' + ' '.join(args)
 
        log.debug('Executing %s', command)
 
        if DEBUG:
 
            print command
 
            print(command)
 
        p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd)
 
        stdout, stderr = p.communicate()
 
        if DEBUG:
 
            print stdout, stderr
 
            print(stdout, stderr)
 
        return stdout, stderr
 

	
 

	
 
def get_session():
 
    engine = engine_from_config(conf, 'sqlalchemy.')
 
    init_model(engine)
 
    sa = meta.Session
 
    return sa
 

	
 

	
 
def create_test_user(force=True):
 
    print 'creating test user'
 
    print('creating test user')
 
    sa = get_session()
 

	
 
    user = sa.query(User).filter(User.username == USER).scalar()
 

	
 
    if force and user is not None:
 
        print 'removing current user'
 
        print('removing current user')
 
        for repo in sa.query(Repository).filter(Repository.user == user).all():
 
            sa.delete(repo)
 
        sa.delete(user)
 
        sa.commit()
 

	
 
    if user is None or force:
 
        print 'creating new one'
 
        print('creating new one')
 
        new_usr = User()
 
        new_usr.username = USER
 
        new_usr.password = get_crypt_password(PASS)
 
        new_usr.email = 'mail@example.com'
 
        new_usr.name = 'test'
 
        new_usr.lastname = 'lasttestname'
 
        new_usr.active = True
 
        new_usr.admin = True
 
        sa.add(new_usr)
 
        sa.commit()
 

	
 
    print 'done'
 
    print('done')
 

	
 

	
 
def create_test_repo(force=True):
 
    print 'creating test repo'
 
    print('creating test repo')
 
    from kallithea.model.repo import RepoModel
 
    sa = get_session()
 

	
 
    user = sa.query(User).filter(User.username == USER).scalar()
 
    if user is None:
 
        raise Exception('user not found')
 

	
 
    repo = sa.query(Repository).filter(Repository.repo_name == HG_REPO).scalar()
 

	
 
    if repo is None:
 
        print 'repo not found creating'
 
        print('repo not found creating')
 

	
 
        form_data = {'repo_name': HG_REPO,
 
                     'repo_type': 'hg',
 
                     'private': False,
 
                     'clone_uri': ''}
 
        rm = RepoModel()
 
        rm.base_path = '/home/hg'
 
        rm.create(form_data, user)
 

	
 
    print 'done'
 
    print('done')
 

	
 

	
 
def set_anonymous_access(enable=True):
 
    sa = get_session()
 
    user = sa.query(User).filter(User.username == 'default').one()
 
    user.active = enable
 
    sa.add(user)
 
    sa.commit()
 

	
 

	
 
def get_anonymous_access():
 
    sa = get_session()
 
    return sa.query(User).filter(User.username == 'default').one().active
 

	
 

	
 
#==============================================================================
 
# TESTS
 
#==============================================================================
 
def test_clone_with_credentials(no_errors=False, repo=HG_REPO, method=METHOD,
 
                                backend='hg'):
 
    cwd = path = os.path.join(Ui.get_by_key('paths', '/').ui_value, repo)
 

	
 
    try:
 
        shutil.rmtree(path, ignore_errors=True)
 
        os.makedirs(path)
 
        #print 'made dirs %s' % os.path.join(path)
 
    except OSError:
 
        raise
 

	
 
    clone_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s' % \
 
                  {'user': USER,
 
                   'pass': PASS,
 
                   'host': HOST,
 
                   'cloned_repo': repo, }
 

	
 
    dest = tempfile.mktemp(dir=path, prefix='dest-')
 
    if method == 'pull':
 
        stdout, stderr = Command(cwd).execute(backend, method, '--cwd', dest, clone_url)
 
    else:
 
        stdout, stderr = Command(cwd).execute(backend, method, clone_url, dest)
 
        if not no_errors:
 
            if backend == 'hg':
 
                assert """adding file changes""" in stdout, 'no messages about cloning'
 
                assert """abort""" not in stderr, 'got error from clone'
 
            elif backend == 'git':
 
                assert """Cloning into""" in stdout, 'no messages about cloning'
 

	
 

	
 
if __name__ == '__main__':
 
    try:
 
        create_test_user(force=False)
 
        import time
 

	
 
        try:
 
            METHOD = sys.argv[3]
 
        except IndexError:
 
            pass
 

	
 
        try:
 
            backend = sys.argv[4]
 
        except IndexError:
 
            backend = 'hg'
 

	
 
        if METHOD == 'pull':
 
            seq = tempfile._RandomNameSequence().next()
 
            test_clone_with_credentials(repo=sys.argv[1], method='clone',
 
                                        backend=backend)
 
        s = time.time()
 
        for i in range(1, int(sys.argv[2]) + 1):
 
            print 'take', i
 
            print('take', i)
 
            test_clone_with_credentials(repo=sys.argv[1], method=METHOD,
 
                                        backend=backend)
 
        print 'time taken %.3f' % (time.time() - s)
 
        print('time taken %.3f' % (time.time() - s))
 
    except Exception as e:
 
        sys.exit('stop on %s' % e)
kallithea/tests/scripts/manual_test_crawler.py
Show inline comments
 
#!/usr/bin/env python2
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.tests.scripts.manual_test_crawler
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Test for crawling a project for memory usage
 
This should be run just as a regular script, together
 
with a watch script that will show memory usage.
 

	
 
watch -n1 ./kallithea/tests/mem_watch
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 21, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import print_function
 

	
 
import cookielib
 
import os
 
import sys
 
import tempfile
 
import time
 
import urllib
 
import urllib2
 
from os.path import dirname
 

	
 
from kallithea.lib import vcs
 
from kallithea.lib.compat import OrderedSet
 
from kallithea.lib.vcs.exceptions import RepositoryError
 

	
 

	
 
__here__ = os.path.abspath(__file__)
 
__root__ = dirname(dirname(dirname(__here__)))
 
sys.path.append(__root__)
 

	
 

	
 
PASES = 3
 
HOST = 'http://127.0.0.1'
 
PORT = 5000
 
BASE_URI = '%s:%s/' % (HOST, PORT)
 

	
 
if len(sys.argv) == 2:
 
    BASE_URI = sys.argv[1]
 

	
 
if not BASE_URI.endswith('/'):
 
    BASE_URI += '/'
 

	
 
print 'Crawling @ %s' % BASE_URI
 
print('Crawling @ %s' % BASE_URI)
 
BASE_URI += '%s'
 
PROJECT_PATH = os.path.join('/', 'home', 'username', 'repos')
 
PROJECTS = [
 
    # 'linux-magx-pbranch',
 
    'CPython',
 
    'kallithea',
 
]
 

	
 

	
 
cj = cookielib.FileCookieJar(os.path.join(tempfile.gettempdir(), 'rc_test_cookie.txt'))
 
o = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
 
o.addheaders = [
 
    ('User-agent', 'kallithea-crawler'),
 
    ('Accept-Language', 'en-us, en;q=0.5')
 
]
 

	
 
urllib2.install_opener(o)
 

	
 

	
 
def _get_repo(proj):
 
    if isinstance(proj, basestring):
 
        repo = vcs.get_repo(os.path.join(PROJECT_PATH, proj))
 
        proj = proj
 
    else:
 
        repo = proj
 
        proj = repo.name
 

	
 
    return repo, proj
 

	
 

	
 
def test_changelog_walk(proj, pages=100):
 
    repo, proj = _get_repo(proj)
 

	
 
    total_time = 0
 
    for i in range(1, pages):
 

	
 
        page = '/'.join((proj, 'changelog',))
 

	
 
        full_uri = (BASE_URI % page) + '?' + urllib.urlencode({'page': i})
 
        s = time.time()
 
        f = o.open(full_uri)
 

	
 
        assert f.url == full_uri, 'URL:%s does not match %s' % (f.url, full_uri)
 

	
 
        size = len(f.read())
 
        e = time.time() - s
 
        total_time += e
 
        print 'visited %s size:%s req:%s ms' % (full_uri, size, e)
 
        print('visited %s size:%s req:%s ms' % (full_uri, size, e))
 

	
 
    print 'total_time', total_time
 
    print 'average on req', total_time / float(pages)
 
    print('total_time', total_time)
 
    print('average on req', total_time / float(pages))
 

	
 

	
 
def test_changeset_walk(proj, limit=None):
 
    repo, proj = _get_repo(proj)
 

	
 
    print 'processing', os.path.join(PROJECT_PATH, proj)
 
    print('processing', os.path.join(PROJECT_PATH, proj))
 
    total_time = 0
 

	
 
    cnt = 0
 
    for i in repo:
 
        cnt += 1
 
        raw_cs = '/'.join((proj, 'changeset', i.raw_id))
 
        if limit and limit == cnt:
 
            break
 

	
 
        full_uri = (BASE_URI % raw_cs)
 
        print '%s visiting %s\%s' % (cnt, full_uri, i)
 
        print('%s visiting %s\%s' % (cnt, full_uri, i))
 
        s = time.time()
 
        f = o.open(full_uri)
 
        size = len(f.read())
 
        e = time.time() - s
 
        total_time += e
 
        print '%s visited %s\%s size:%s req:%s ms' % (cnt, full_uri, i, size, e)
 
        print('%s visited %s\%s size:%s req:%s ms' % (cnt, full_uri, i, size, e))
 

	
 
    print 'total_time', total_time
 
    print 'average on req', total_time / float(cnt)
 
    print('total_time', total_time)
 
    print('average on req', total_time / float(cnt))
 

	
 

	
 
def test_files_walk(proj, limit=100):
 
    repo, proj = _get_repo(proj)
 

	
 
    print 'processing', os.path.join(PROJECT_PATH, proj)
 
    print('processing', os.path.join(PROJECT_PATH, proj))
 
    total_time = 0
 

	
 
    paths_ = OrderedSet([''])
 
    try:
 
        tip = repo.get_changeset('tip')
 
        for topnode, dirs, files in tip.walk('/'):
 

	
 
            for dir in dirs:
 
                paths_.add(dir.path)
 
                for f in dir:
 
                    paths_.add(f.path)
 

	
 
            for f in files:
 
                paths_.add(f.path)
 

	
 
    except RepositoryError as e:
 
        pass
 

	
 
    cnt = 0
 
    for f in paths_:
 
        cnt += 1
 
        if limit and limit == cnt:
 
            break
 

	
 
        file_path = '/'.join((proj, 'files', 'tip', f))
 
        full_uri = (BASE_URI % file_path)
 
        print '%s visiting %s' % (cnt, full_uri)
 
        print('%s visiting %s' % (cnt, full_uri))
 
        s = time.time()
 
        f = o.open(full_uri)
 
        size = len(f.read())
 
        e = time.time() - s
 
        total_time += e
 
        print '%s visited OK size:%s req:%s ms' % (cnt, size, e)
 
        print('%s visited OK size:%s req:%s ms' % (cnt, size, e))
 

	
 
    print 'total_time', total_time
 
    print 'average on req', total_time / float(cnt)
 
    print('total_time', total_time)
 
    print('average on req', total_time / float(cnt))
 

	
 
if __name__ == '__main__':
 
    for path in PROJECTS:
 
        repo = vcs.get_repo(os.path.join(PROJECT_PATH, path))
 
        for i in range(PASES):
 
            print 'PASS %s/%s' % (i, PASES)
 
            print('PASS %s/%s' % (i, PASES))
 
            test_changelog_walk(repo, pages=80)
 
            test_changeset_walk(repo, limit=100)
 
            test_files_walk(repo, limit=100)
scripts/docs-headings.py
Show inline comments
 
#!/usr/bin/env python2
 

	
 
"""
 
Consistent formatting of rst section titles
 
"""
 

	
 
from __future__ import print_function
 

	
 
import re
 
import subprocess
 

	
 

	
 
spaces = [
 
    (0, 1), # we assume this is an over-and-underlined header
 
    (2, 1),
 
    (1, 1),
 
    (1, 0),
 
    (1, 0),
 
    ]
 

	
 
# http://sphinx-doc.org/rest.html :
 
#   for the Python documentation, this convention is used which you may follow:
 
#   # with overline, for parts
 
#   * with overline, for chapters
 
#   =, for sections
 
#   -, for subsections
 
#   ^, for subsubsections
 
#   ", for paragraphs
 
pystyles = ['#', '*', '=', '-', '^', '"']
 

	
 
# match on a header line underlined with one of the valid characters
 
headermatch = re.compile(r'''\n*(.+)\n([][!"#$%&'()*+,./:;<=>?@\\^_`{|}~-])\2{2,}\n+''', flags=re.MULTILINE)
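# An illustrative example of the normalization this enables (hypothetical
# input; the mapping depends on which styles a file already uses): if the
# first underline style found in a file is '=', then newstyles in main()
# becomes ['=', '-', '^', '"'], and a second-level title such as
#
#   Some subsection
#   ~~~~~~~~~~~~~~~
#
# is rewritten by subf() to
#
#   Some subsection
#   ---------------
#
# with the underline stretched to the title length and the blank lines around
# it taken from the `spaces` table above.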
 

	
 

	
 
def main():
 
    filenames = subprocess.check_output(['hg', 'loc', 'set:**.rst+kallithea/i18n/how_to']).splitlines()
 
    for fn in filenames:
 
        print 'processing %s' % fn
 
        print('processing %s' % fn)
 
        s = open(fn).read()
 

	
 
        # find levels and their styles
 
        lastpos = 0
 
        styles = []
 
        for markup in headermatch.findall(s):
 
            style = markup[1]
 
            if style in styles:
 
                stylepos = styles.index(style)
 
                if stylepos > lastpos + 1:
 
                    print 'bad style %r with level %s - was at %s' % (style, stylepos, lastpos)
 
                    print('bad style %r with level %s - was at %s' % (style, stylepos, lastpos))
 
            else:
 
                stylepos = len(styles)
 
                if stylepos > lastpos + 1:
 
                    print 'bad new style %r - expected %r' % (style, styles[lastpos + 1])
 
                    print('bad new style %r - expected %r' % (style, styles[lastpos + 1]))
 
                else:
 
                    styles.append(style)
 
            lastpos = stylepos
 

	
 
        # remove superfluous spacing (may however be restored by header spacing)
 
        s = re.sub(r'''(\n\n)\n*''', r'\1', s, flags=re.MULTILINE)
 

	
 
        if styles:
 
            newstyles = pystyles[pystyles.index(styles[0]):]
 

	
 
            def subf(m):
 
                title, style = m.groups()
 
                level = styles.index(style)
 
                before, after = spaces[level]
 
                newstyle = newstyles[level]
 
                return '\n' * (before + 1) + title + '\n' + newstyle * len(title) + '\n' * (after + 1)
 
            s = headermatch.sub(subf, s)
 

	
 
        # remove superfluous spacing when headers are adjacent
 
        s = re.sub(r'''(\n.+\n([][!"#$%&'()*+,./:;<=>?@\\^_`{|}~-])\2{2,}\n\n\n)\n*''', r'\1', s, flags=re.MULTILINE)
 
        # fix trailing space and spacing before link sections
 
        s = s.strip() + '\n'
 
        s = re.sub(r'''\n+((?:\.\. _[^\n]*\n)+)$''', r'\n\n\n\1', s)
 

	
 
        open(fn, 'w').write(s)
 

	
 
    print subprocess.check_output(['hg', 'diff'] + filenames)
 
    print(subprocess.check_output(['hg', 'diff'] + filenames))
 

	
 
if __name__ == '__main__':
 
    main()
scripts/generate-ini.py
Show inline comments
 
#!/usr/bin/env python2
 
"""
 
Based on kallithea/lib/paster_commands/template.ini.mako, generate development.ini
 
"""
 

	
 
from __future__ import print_function
 

	
 
import re
 

	
 
from kallithea.lib import inifile
 

	
 

	
 
# files to be generated from the mako template
 
ini_files = [
 
    ('development.ini',
 
        {
 
            '[server:main]': {
 
                'host': '0.0.0.0',
 
            },
 
            '[app:main]': {
 
                'debug': 'true',
 
                'app_instance_uuid': 'development-not-secret',
 
                'session.secret': 'development-not-secret',
 
            },
 
            '[logger_root]': {
 
                'handlers': 'console_color',
 
            },
 
            '[logger_routes]': {
 
                'level': 'DEBUG',
 
            },
 
            '[logger_beaker]': {
 
                'level': 'DEBUG',
 
            },
 
            '[logger_templates]': {
 
                'level': 'INFO',
 
            },
 
            '[logger_kallithea]': {
 
                'level': 'DEBUG',
 
            },
 
            '[logger_tg]': {
 
                'level': 'DEBUG',
 
            },
 
            '[logger_gearbox]': {
 
                'level': 'DEBUG',
 
            },
 
            '[logger_whoosh_indexer]': {
 
                'level': 'DEBUG',
 
            },
 
        },
 
    ),
 
]
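# For illustration (a sketch of the intended effect, not the full generated
# file): the '[app:main]' overrides above should surface in development.ini
# roughly as
#
#   [app:main]
#   debug = true
#   app_instance_uuid = development-not-secret
#   session.secret = development-not-secret
#
# with everything else coming from template.ini.mako via inifile.create().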
 

	
 

	
 
def main():
 
    # make sure all mako lines starting with '#' (the '##' comments) are marked up as <%text>
 
    makofile = inifile.template_file
 
    print 'reading:', makofile
 
    print('reading:', makofile)
 
    mako_org = open(makofile).read()
 
    mako_no_text_markup = re.sub(r'</?%text>', '', mako_org)
 
    mako_marked_up = re.sub(r'\n(##.*)', r'\n<%text>\1</%text>', mako_no_text_markup, flags=re.MULTILINE)
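    # For example, a template line like
    #   ## custom settings
    # is rewritten by the substitution above to
    #   <%text>## custom settings</%text>
    # so that mako does not swallow the '##' line as a mako comment when the
    # ini file is rendered ('## custom settings' is just a placeholder here).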
 
    if mako_marked_up != mako_org:
 
        print 'writing:', makofile
 
        print('writing:', makofile)
 
        open(makofile, 'w').write(mako_marked_up)
 

	
 
    # create ini files
 
    for fn, settings in ini_files:
 
        print 'updating:', fn
 
        print('updating:', fn)
 
        inifile.create(fn, None, settings)
 

	
 

	
 
if __name__ == '__main__':
 
    main()
scripts/logformat.py
Show inline comments
 
#!/usr/bin/env python2
 

	
 
from __future__ import print_function
 

	
 
import re
 
import sys
 

	
 

	
 
logre = r'''
 
(log\.(?:error|info|warning|debug)
 
[(][ \n]*
 
)
 
%s
 
(
 
[ \n]*[)]
 
)
 
'''
 

	
 

	
 
res = [
 
    # handle % () - keeping spaces around the old %
 
    (re.compile(logre % r'''("[^"]*"|'[^']*')   ([\n ]*) %  ([\n ]*) \( ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) \) ''', flags=re.MULTILINE | re.VERBOSE), r'\1\2,\3\4\5\6'),
 
    # handle % without () - keeping spaces around the old %
 
    (re.compile(logre % r'''("[^"]*"|'[^']*')   ([\n ]*) %  ([\n ]*)    ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* )    ''', flags=re.MULTILINE | re.VERBOSE), r'\1\2,\3\4\5\6'),
 
    # remove extra space if it is on next line
 
    (re.compile(logre % r'''("[^"]*"|'[^']*') , (\n [ ]) ([ ][\n ]*)    ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* )    ''', flags=re.MULTILINE | re.VERBOSE), r'\1\2,\3\4\5\6'),
 
    # remove extra space if it is on same line
 
    (re.compile(logre % r'''("[^"]*"|'[^']*') , [ ]+  () (   [\n ]+)    ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* )    ''', flags=re.MULTILINE | re.VERBOSE), r'\1\2,\3\4\5\6'),
 
    # remove trailing , and space
 
    (re.compile(logre % r'''("[^"]*"|'[^']*') ,       () (   [\n ]*)    ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* [^(), \n] ) [ ,]*''', flags=re.MULTILINE | re.VERBOSE), r'\1\2,\3\4\5\6'),
 
    ]
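# A sketch of the rewrite these patterns aim at (placeholder variable names,
# illustrative input/output only):
#
#   log.debug('got %s from %s' % (value, source))
#
# becomes
#
#   log.debug('got %s from %s', value, source)
#
# deferring the string interpolation to the logging call itself.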
 

	
 

	
 
def rewrite(f):
 
    s = open(f).read()
 
    for r, t in res:
 
        s = r.sub(t, s)
 
    open(f, 'w').write(s)
 

	
 

	
 
if __name__ == '__main__':
 
    if len(sys.argv) < 2:
 
        print 'Cleanup of superfluous % formatting of log statements.'
 
        print 'Usage:'
 
        print '''  hg revert `hg loc '*.py'|grep -v logformat.py` && scripts/logformat.py `hg loc '*.py'` && hg diff'''
 
        print('Cleanup of superfluous % formatting of log statements.')
 
        print('Usage:')
 
        print('''  hg revert `hg loc '*.py'|grep -v logformat.py` && scripts/logformat.py `hg loc '*.py'` && hg diff''')
 
        raise SystemExit(1)
 

	
 
    for f in sys.argv[1:]:
 
        rewrite(f)
0 comments (0 inline, 0 general)