Changeset - 431689d7f37d
kallithea/bin/kallithea_api.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.bin.kallithea_api
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
API CLI client for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jun 3, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 
import sys
 
import argparse
 

	
 
from kallithea.bin.base import json, api_call, RcConf, FORMAT_JSON, FORMAT_PRETTY
 

	
 

	
 
def argparser(argv):
 
    usage = (
 
      "kallithea-api [-h] [--format=FORMAT] [--apikey=APIKEY] [--apihost=APIHOST] "
 
      "[--config=CONFIG] [--save-config] "
 
      "METHOD <key:val> <key2:val> ...\n"
 
      "Create config file: kallithea-api --apikey=<key> --apihost=http://your.kallithea.server --save-config"
 
    )
 

	
 
    parser = argparse.ArgumentParser(description='Kallithea API cli',
 
                                     usage=usage)
 

	
 
    ## config
 
    group = parser.add_argument_group('config')
 
    group.add_argument('--apikey', help='api access key')
 
    group.add_argument('--apihost', help='api host')
 
    group.add_argument('--config', help='config file')
 
    group.add_argument('--save-config', action='store_true', help='save the given config into a file')
 

	
 
    group = parser.add_argument_group('API')
 
    group.add_argument('method', metavar='METHOD', nargs='?', type=str, default=None,
 
            help='API method name to call followed by key:value attributes',
 
    )
 
    group.add_argument('--format', dest='format', type=str,
 
            help='output format, default: `%s`; can '
 
                 'also be `%s`' % (FORMAT_PRETTY, FORMAT_JSON),
 
            default=FORMAT_PRETTY
 
    )
 
    args, other = parser.parse_known_args()
 
    return parser, args, other
 

	
 

	
 
def main(argv=None):
 
    """
 
    Main execution function for the CLI
 

	
 
    :param argv: command line arguments; defaults to sys.argv
 
    """
 
    if argv is None:
 
        argv = sys.argv
 

	
 
    conf = None
 
    parser, args, other = argparser(argv)
 

	
 
    api_credentials_given = (args.apikey and args.apihost)
 
    if args.save_config:
 
        if not api_credentials_given:
 
            parser.error('--save-config requires --apikey and --apihost')
 
        conf = RcConf(config_location=args.config,
 
                      autocreate=True, config={'apikey': args.apikey,
 
                                               'apihost': args.apihost})
 
        sys.exit()
 

	
 
    if not conf:
 
        conf = RcConf(config_location=args.config, autoload=True)
 
        if not conf:
 
            if not api_credentials_given:
 
                parser.error('Could not find config file and missing '
 
                             '--apikey or --apihost in params')
 

	
 
    apikey = args.apikey or conf['apikey']
 
    apihost = args.apihost or conf['apihost']
 
    method = args.method
 

	
 
    # if we don't have method here it's an error
 
    if not method:
 
        parser.error('Please specify method name')
 

	
 
    try:
 
        margs = dict(map(lambda s: s.split(':', 1), other))
 
    except ValueError:
 
        sys.stderr.write('Error parsing arguments\n')
 
        sys.exit(1)
 
    if args.format == FORMAT_PRETTY:
 
        print 'Calling method %s => %s' % (method, apihost)
 

	
 
    json_resp = api_call(apikey, apihost, method, **margs)
 
    error_prefix = ''
 
    if json_resp['error']:
 
        error_prefix = 'ERROR:'
 
        json_data = json_resp['error']
 
    else:
 
        json_data = json_resp['result']
 
    if args.format == FORMAT_JSON:
 
        print json.dumps(json_data)
 
    elif args.format == FORMAT_PRETTY:
 
        print 'Server response \n%s%s' % (
 
            error_prefix, json.dumps(json_data, indent=4, sort_keys=True)
 
        )
 
    return 0
 

	
 
if __name__ == '__main__':
 
    sys.exit(main(sys.argv))
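
A minimal standalone sketch (not part of this changeset) of the key:val parsing done in main() above: each argument after METHOD is split once on ':' and the resulting pairs become the keyword arguments of the JSON-RPC call.

    # illustration of how `margs` is built from the extra CLI arguments
    other = ['userid:1', 'message:hello world']
    margs = dict(map(lambda s: s.split(':', 1), other))
    assert margs == {'userid': '1', 'message': 'hello world'}
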
kallithea/bin/kallithea_config.py
 
#!/usr/bin/env python2
 

	
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.bin.kallithea_config
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
configuration generator for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jun 18, 2013
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 

	
 
from __future__ import with_statement
 
import os
 
import sys
 
import uuid
 
import argparse
 
from mako.template import Template
 
TMPL = 'template.ini.mako'
 
here = os.path.dirname(os.path.abspath(__file__))
 

	
 
def argparser(argv):
 
    usage = (
 
      "kallithea-config [-h] [--filename=FILENAME] [--template=TEMPLATE] \n"
 
      "VARS optional specify extra template variable that will be available in "
 
      "template. Use comma separated key=val format eg.\n"
 
      "key1=val1,port=5000,host=127.0.0.1,elements='a\,b\,c'\n"
 
    )
 

	
 
    parser = argparse.ArgumentParser(
 
        description='Kallithea CONFIG generator with variable replacement',
 
        usage=usage
 
    )
 

	
 
    ## config
 
    group = parser.add_argument_group('CONFIG')
 
    group.add_argument('--filename', help='Output ini filename.')
 
    group.add_argument('--template', help='Mako template file to use instead of '
 
                                          'the default builtin template')
 
    group.add_argument('--raw', help='Store given mako template as raw without '
 
                                     'parsing. Use this to create custom template '
 
                                     'initially', action='store_true')
 
    group.add_argument('--show-defaults', help='Show all default variables for '
 
                                               'builtin template', action='store_true')
 
    args, other = parser.parse_known_args()
 
    return parser, args, other
 

	
 

	
 
def _escape_split(text, sep):
 
    """
 
    Allows for escaping of the separator: e.g. arg='foo\, bar'
 

	
 
    It should be noted that with the way bash et al. do command line parsing,
 
    those single quotes are required. A shameless ripoff from the fabric project.
 

	
 
    """
 
    escaped_sep = r'\%s' % sep
 

	
 
    if escaped_sep not in text:
 
        return text.split(sep)
 

	
 
    before, _, after = text.partition(escaped_sep)
 
    startlist = before.split(sep)  # a regular split is fine here
 
    unfinished = startlist[-1]
 
    startlist = startlist[:-1]
 

	
 
    # recurse because there may be more escaped separators
 
    endlist = _escape_split(after, sep)
 

	
 
    # finish building the escaped value. we use endlist[0] because the first
 
    # part of the string sent in recursion is the rest of the escaped value.
 
    unfinished += sep + endlist[0]
 

	
 
    return startlist + [unfinished] + endlist[1:]  # put together all the parts
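# Standalone illustration (not part of the original file): a literal separator
# inside a value is escaped with a backslash so it survives the outer split:
#   _escape_split(r"key1=val1,port=5000,elements=a\,b\,c", ',')
#   -> ['key1=val1', 'port=5000', 'elements=a,b,c']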
 

	
 
def _run(argv):
 
    parser, args, other = argparser(argv)
 
    if not len(argv) > 1:
 
        parser.print_help()
 
        sys.exit(0)
 
    # defaults that can be overridden by arguments
 
    tmpl_stored_args = {
 
        'http_server': 'waitress',
 
        'lang': 'en',
 
        'database_engine': 'sqlite',
 
        'host': '127.0.0.1',
 
        'port': 5000,
 
        'error_aggregation_service': None,
 
    }
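    # e.g. (illustrative only) `kallithea-config --filename=my.ini \
    #     host=0.0.0.0,port=8080` keeps these defaults but overrides host and
    # port via the key=val parsing below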
 
    if other:
 
        # parse extra template variables; only the first positional argument is used
 
        kwargs = {}
 
        for el in _escape_split(other[0], ','):
 
            kv = _escape_split(el, '=')
 
            if len(kv) == 2:
 
                k, v = kv
 
                kwargs[k] = v
 
        # update our template stored args
 
        tmpl_stored_args.update(kwargs)
 

	
 
    # defaults that cannot be overridden
 
    tmpl_stored_args.update({
 
        'uuid': lambda: uuid.uuid4().hex,
 
        'here': os.path.abspath(os.curdir),
 
    })
 
    if args.show_defaults:
 
        for k, v in tmpl_stored_args.iteritems():
 
            print '%s=%s' % (k, v)
 
        sys.exit(0)
 
    try:
 
        # built-in template
 
        tmpl_file = os.path.join(here, TMPL)
 
        if args.template:
 
            tmpl_file = args.template
 

	
 
        with open(tmpl_file, 'rb') as f:
 
            tmpl_data = f.read().decode('utf-8')
 
            if args.raw:
 
                tmpl = tmpl_data
 
            else:
 
                tmpl = Template(tmpl_data).render(**tmpl_stored_args)
 
        with open(args.filename, 'wb') as f:
 
            f.write(tmpl.encode('utf-8'))
 
        print 'Wrote new config file in %s' % (os.path.abspath(args.filename))
 

	
 
    except Exception:
 
        from mako import exceptions
 
        print exceptions.text_error_template().render()
 

	
 
def main(argv=None):
 
    """
 
    Main execution function for the CLI
 

	
 
    :param argv: command line arguments; defaults to sys.argv
 
    """
 
    if argv is None:
 
        argv = sys.argv
 

	
 
    return _run(argv)
 

	
 

	
 
if __name__ == '__main__':
 
    sys.exit(main(sys.argv))
kallithea/bin/kallithea_gist.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.bin.kallithea_gist
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Gist CLI client for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: May 9, 2013
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 
import os
 
import sys
 
import stat
 
import argparse
 
import fileinput
 

	
 
from kallithea.bin.base import json, api_call, RcConf, FORMAT_JSON, FORMAT_PRETTY
 

	
 

	
 
def argparser(argv):
 
    usage = (
 
      "kallithea-gist [-h] [--format=FORMAT] [--apikey=APIKEY] [--apihost=APIHOST] "
 
      "[--config=CONFIG] [--save-config] [GIST OPTIONS] "
 
      "[filename or stdin use - for terminal stdin ]\n"
 
      "Create config file: kallithea-gist --apikey=<key> --apihost=http://your.kallithea.server --save-config"
 
    )
 

	
 
    parser = argparse.ArgumentParser(description='Kallithea Gist cli',
 
                                     usage=usage)
 

	
 
    ## config
 
    group = parser.add_argument_group('config')
 
    group.add_argument('--apikey', help='api access key')
 
    group.add_argument('--apihost', help='api host')
 
    group.add_argument('--config', help='config file path DEFAULT: ~/.config/kallithea')
 
    group.add_argument('--save-config', action='store_true',
 
                       help='save the given config into a file')
 

	
 
    group = parser.add_argument_group('GIST')
 
    group.add_argument('-p', '--private', action='store_true',
 
                       help='create private Gist')
 
    group.add_argument('-f', '--filename',
 
                       help='set uploaded gist filename, '
 
                            'also defines syntax highlighting')
 
    group.add_argument('-d', '--description', help='Gist description')
 
    group.add_argument('-l', '--lifetime', metavar='MINUTES',
 
                       help='gist lifetime in minutes, -1 (DEFAULT) is forever')
 
    group.add_argument('--format', dest='format', type=str,
 
                       help='output format, DEFAULT: `%s`; can '
 
                       'also be `%s`' % (FORMAT_PRETTY, FORMAT_JSON),
 
            default=FORMAT_PRETTY
 
    )
 
    args, other = parser.parse_known_args()
 
    return parser, args, other
 

	
 

	
 
def _run(argv):
 
    conf = None
 
    parser, args, other = argparser(argv)
 

	
 
    api_credentials_given = (args.apikey and args.apihost)
 
    if args.save_config:
 
        if not api_credentials_given:
 
            parser.error('--save-config requires --apikey and --apihost')
 
        conf = RcConf(config_location=args.config,
 
                      autocreate=True, config={'apikey': args.apikey,
 
                                               'apihost': args.apihost})
 
        sys.exit()
 

	
 
    if not conf:
 
        conf = RcConf(config_location=args.config, autoload=True)
 
        if not conf:
 
            if not api_credentials_given:
 
                parser.error('Could not find config file and missing '
 
                             '--apikey or --apihost in params')
 

	
 
    apikey = args.apikey or conf['apikey']
 
    host = args.apihost or conf['apihost']
 
    DEFAULT_FILENAME = 'gistfile1.txt'
 
    if other:
 
        # multi-file gists are not supported yet; only the first filename is used
 
        filename = other[0]
 
        if filename == '-':
 
            filename = DEFAULT_FILENAME
 
            gist_content = ''
 
            for line in fileinput.input('-'):
 
                gist_content += line
 
        else:
 
            with open(filename, 'rb') as f:
 
                gist_content = f.read()
 

	
 
    else:
 
        filename = DEFAULT_FILENAME
 
        gist_content = None
 
        # slightly hacky but cross-platform check of where stdin comes from;
 
        # we skip the terminal case, which can be handled with '-'
 
        mode = os.fstat(0).st_mode
 
        if stat.S_ISFIFO(mode):
 
            # "stdin is piped"
 
            gist_content = sys.stdin.read()
 
        elif stat.S_ISREG(mode):
 
            # "stdin is redirected"
 
            gist_content = sys.stdin.read()
 
        else:
 
            # "stdin is terminal"
 
            pass
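        # e.g. `echo hi | kallithea-gist` takes the S_ISFIFO branch, while
        # `kallithea-gist < notes.txt` takes the S_ISREG branch (illustration,
        # not in the original file)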
 

	
 
    # make sure we don't upload binary stuff
 
    if gist_content and '\0' in gist_content:
 
        raise Exception('Error: binary files upload is not possible')
 

	
 
    filename = os.path.basename(args.filename or filename)
 
    if gist_content:
 
        files = {
 
            filename: {
 
                'content': gist_content,
 
                'lexer': None
 
            }
 
        }
 

	
 
        margs = dict(
 
            lifetime=args.lifetime,
 
            description=args.description,
 
            gist_type='private' if args.private else 'public',
 
            files=files
 
        )
 

	
 
        json_data = api_call(apikey, host, 'create_gist', **margs)['result']
 
        if args.format == FORMAT_JSON:
 
            print json.dumps(json_data)
 
        elif args.format == FORMAT_PRETTY:
 
            print json_data
 
            print 'Created %s gist %s' % (json_data['gist']['type'],
 
                                          json_data['gist']['url'])
 
    return 0
 

	
 

	
 
def main(argv=None):
 
    """
 
    Main execution function for the CLI
 

	
 
    :param argv: command line arguments; defaults to sys.argv
 
    """
 
    if argv is None:
 
        argv = sys.argv
 

	
 
    try:
 
        return _run(argv)
 
    except Exception as e:
 
        print e
 
        return 1
 

	
 

	
 
if __name__ == '__main__':
 
    sys.exit(main(sys.argv))
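
The whole gist upload above reduces to one create_gist API call; a minimal standalone sketch of the payload _run() assembles (the filename, content and description below are placeholders):

    files = {
        'example.txt': {
            'content': 'some text',
            'lexer': None,          # no explicit lexer; highlighting follows the filename
        }
    }
    margs = dict(
        lifetime=-1,                # the documented default: keep forever
        description='demo gist',
        gist_type='public',         # 'private' when -p/--private is given
        files=files,
    )
    # json_data = api_call(apikey, host, 'create_gist', **margs)['result']
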
kallithea/config/routing.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
Routes configuration
 

	
 
The more specific and detailed routes should be defined first so they
 
may take precedence over the more generic routes. For more information
 
refer to the routes manual at http://routes.groovie.org/docs/
 
"""
 

	
 
from __future__ import with_statement
 
from routes import Mapper
 

	
 
# prefix for non-repository-related links; it needs to be prefixed with `/`
 
ADMIN_PREFIX = '/_admin'
 

	
 

	
 
def make_map(config):
 
    """Create, configure and return the routes Mapper"""
 
    rmap = Mapper(directory=config['pylons.paths']['controllers'],
 
                  always_scan=config['debug'])
 
    rmap.minimization = False
 
    rmap.explicit = False
 

	
 
    from kallithea.lib.utils import (is_valid_repo, is_valid_repo_group,
 
                                     get_repo_by_id)
 

	
 
    def check_repo(environ, match_dict):
 
        """
 
        check for valid repository for proper 404 handling
 

	
 
        :param environ:
 
        :param match_dict:
 
        """
 
        repo_name = match_dict.get('repo_name')
 

	
 
        if match_dict.get('f_path'):
 
            # fix for multiple initial slashes that cause errors
 
            match_dict['f_path'] = match_dict['f_path'].lstrip('/')
 

	
 
        by_id_match = get_repo_by_id(repo_name)
 
        if by_id_match:
 
            repo_name = by_id_match
 
            match_dict['repo_name'] = repo_name
 

	
 
        return is_valid_repo(repo_name, config['base_path'])
 

	
 
    def check_group(environ, match_dict):
 
        """
 
        check for valid repository group for proper 404 handling
 

	
 
        :param environ:
 
        :param match_dict:
 
        """
 
        repo_group_name = match_dict.get('group_name')
 
        return is_valid_repo_group(repo_group_name, config['base_path'])
 

	
 
    def check_group_skip_path(environ, match_dict):
 
        """
 
        check for valid repository group for proper 404 handling, but skips
 
        verification of existing path
 

	
 
        :param environ:
 
        :param match_dict:
 
        """
 
        repo_group_name = match_dict.get('group_name')
 
        return is_valid_repo_group(repo_group_name, config['base_path'],
 
                                   skip_path_check=True)
 

	
 
    def check_user_group(environ, match_dict):
 
        """
 
        check for valid user group for proper 404 handling
 

	
 
        :param environ:
 
        :param match_dict:
 
        """
 
        return True
 

	
 
    def check_int(environ, match_dict):
 
        return match_dict.get('id').isdigit()
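    # Illustration (not part of the original file): routes calls each
    # conditions 'function' with (environ, match_dict) after a URL pattern
    # matches, and rejects the match when it returns False; so, e.g.,
    # conditions=dict(function=check_repo) turns a matching URL for a
    # nonexistent repository into a proper 404.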
 

	
 
    # The ErrorController route (handles 404/500 error pages); it should
 
    # likely stay at the top, ensuring it can always be resolved
 
    rmap.connect('/error/{action}', controller='error')
 
    rmap.connect('/error/{action}/{id}', controller='error')
 

	
 
    #==========================================================================
 
    # CUSTOM ROUTES HERE
 
    #==========================================================================
 

	
 
    #MAIN PAGE
 
    rmap.connect('home', '/', controller='home', action='index')
 
    rmap.connect('about', '/about', controller='home', action='about')
 
    rmap.connect('repo_switcher_data', '/_repos', controller='home',
 
                 action='repo_switcher_data')
 

	
 
    rmap.connect('rst_help',
 
                 "http://docutils.sourceforge.net/docs/user/rst/quickref.html",
 
                 _static=True)
 
    rmap.connect('kallithea_project_url', "https://kallithea-scm.org/", _static=True)
 
    rmap.connect('issues_url', 'https://bitbucket.org/conservancy/kallithea/issues', _static=True)
 

	
 
    #ADMIN REPOSITORY ROUTES
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/repos') as m:
 
        m.connect("repos", "/repos",
 
                  action="create", conditions=dict(method=["POST"]))
 
        m.connect("repos", "/repos",
 
                  action="index", conditions=dict(method=["GET"]))
 
        m.connect("new_repo", "/create_repository",
 
                  action="create_repository", conditions=dict(method=["GET"]))
 
        m.connect("put_repo", "/repos/{repo_name:.*?}",
 
                  action="update", conditions=dict(method=["PUT"],
 
                  function=check_repo))
 
        m.connect("delete_repo", "/repos/{repo_name:.*?}",
 
                  action="delete", conditions=dict(method=["DELETE"],
 
                  ))
 

	
 
    #ADMIN REPOSITORY GROUPS ROUTES
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/repo_groups') as m:
 
        m.connect("repos_groups", "/repo_groups",
 
                  action="create", conditions=dict(method=["POST"]))
 
        m.connect("repos_groups", "/repo_groups",
 
                  action="index", conditions=dict(method=["GET"]))
 
        m.connect("new_repos_group", "/repo_groups/new",
 
                  action="new", conditions=dict(method=["GET"]))
 
        m.connect("update_repos_group", "/repo_groups/{group_name:.*?}",
 
                  action="update", conditions=dict(method=["PUT"],
 
                                                   function=check_group))
 

	
 
        m.connect("repos_group", "/repo_groups/{group_name:.*?}",
 
                  action="show", conditions=dict(method=["GET"],
 
                                                 function=check_group))
 

	
 
        #EXTRAS REPO GROUP ROUTES
 
        m.connect("edit_repo_group", "/repo_groups/{group_name:.*?}/edit",
 
                  action="edit",
 
                  conditions=dict(method=["GET"], function=check_group))
 
        m.connect("edit_repo_group", "/repo_groups/{group_name:.*?}/edit",
 
                  action="edit",
 
                  conditions=dict(method=["PUT"], function=check_group))
 

	
 
        m.connect("edit_repo_group_advanced", "/repo_groups/{group_name:.*?}/edit/advanced",
 
                  action="edit_repo_group_advanced",
 
                  conditions=dict(method=["GET"], function=check_group))
 
        m.connect("edit_repo_group_advanced", "/repo_groups/{group_name:.*?}/edit/advanced",
 
                  action="edit_repo_group_advanced",
 
                  conditions=dict(method=["PUT"], function=check_group))
 

	
 
        m.connect("edit_repo_group_perms", "/repo_groups/{group_name:.*?}/edit/permissions",
 
                  action="edit_repo_group_perms",
 
                  conditions=dict(method=["GET"], function=check_group))
 
        m.connect("edit_repo_group_perms", "/repo_groups/{group_name:.*?}/edit/permissions",
 
                  action="update_perms",
 
                  conditions=dict(method=["PUT"], function=check_group))
 
        m.connect("edit_repo_group_perms", "/repo_groups/{group_name:.*?}/edit/permissions",
 
                  action="delete_perms",
 
                  conditions=dict(method=["DELETE"], function=check_group))
 

	
 
        m.connect("delete_repo_group", "/repo_groups/{group_name:.*?}",
 
                  action="delete", conditions=dict(method=["DELETE"],
 
                                                   function=check_group_skip_path))
 

	
 

	
 
    #ADMIN USER ROUTES
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/users') as m:
 
        m.connect("users", "/users",
 
                  action="create", conditions=dict(method=["POST"]))
 
        m.connect("users", "/users",
 
                  action="index", conditions=dict(method=["GET"]))
 
        m.connect("formatted_users", "/users.{format}",
 
                  action="index", conditions=dict(method=["GET"]))
 
        m.connect("new_user", "/users/new",
 
                  action="new", conditions=dict(method=["GET"]))
 
        m.connect("update_user", "/users/{id}",
 
                  action="update", conditions=dict(method=["PUT"]))
 
        m.connect("delete_user", "/users/{id}",
 
                  action="delete", conditions=dict(method=["DELETE"]))
 
        m.connect("edit_user", "/users/{id}/edit",
 
                  action="edit", conditions=dict(method=["GET"]))
 
        m.connect("user", "/users/{id}",
 
                  action="show", conditions=dict(method=["GET"]))
 

	
 
        #EXTRAS USER ROUTES
 
        m.connect("edit_user_advanced", "/users/{id}/edit/advanced",
 
                  action="edit_advanced", conditions=dict(method=["GET"]))
 
        m.connect("edit_user_advanced", "/users/{id}/edit/advanced",
 
                  action="update_advanced", conditions=dict(method=["PUT"]))
 

	
 
        m.connect("edit_user_api_keys", "/users/{id}/edit/api_keys",
 
                  action="edit_api_keys", conditions=dict(method=["GET"]))
 
        m.connect("edit_user_api_keys", "/users/{id}/edit/api_keys",
 
                  action="add_api_key", conditions=dict(method=["POST"]))
 
        m.connect("edit_user_api_keys", "/users/{id}/edit/api_keys",
 
                  action="delete_api_key", conditions=dict(method=["DELETE"]))
 

	
 
        m.connect("edit_user_perms", "/users/{id}/edit/permissions",
 
                  action="edit_perms", conditions=dict(method=["GET"]))
 
        m.connect("edit_user_perms", "/users/{id}/edit/permissions",
 
                  action="update_perms", conditions=dict(method=["PUT"]))
 

	
 
        m.connect("edit_user_emails", "/users/{id}/edit/emails",
 
                  action="edit_emails", conditions=dict(method=["GET"]))
 
        m.connect("edit_user_emails", "/users/{id}/edit/emails",
 
                  action="add_email", conditions=dict(method=["PUT"]))
 
        m.connect("edit_user_emails", "/users/{id}/edit/emails",
 
                  action="delete_email", conditions=dict(method=["DELETE"]))
 

	
 
        m.connect("edit_user_ips", "/users/{id}/edit/ips",
 
                  action="edit_ips", conditions=dict(method=["GET"]))
 
        m.connect("edit_user_ips", "/users/{id}/edit/ips",
 
                  action="add_ip", conditions=dict(method=["PUT"]))
 
        m.connect("edit_user_ips", "/users/{id}/edit/ips",
 
                  action="delete_ip", conditions=dict(method=["DELETE"]))
 

	
 
    #ADMIN USER GROUPS REST ROUTES
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/user_groups') as m:
 
        m.connect("users_groups", "/user_groups",
 
                  action="create", conditions=dict(method=["POST"]))
 
        m.connect("users_groups", "/user_groups",
 
                  action="index", conditions=dict(method=["GET"]))
 
        m.connect("new_users_group", "/user_groups/new",
 
                  action="new", conditions=dict(method=["GET"]))
 
        m.connect("update_users_group", "/user_groups/{id}",
 
                  action="update", conditions=dict(method=["PUT"]))
 
        m.connect("delete_users_group", "/user_groups/{id}",
 
                  action="delete", conditions=dict(method=["DELETE"]))
 
        m.connect("edit_users_group", "/user_groups/{id}/edit",
 
                  action="edit", conditions=dict(method=["GET"]),
 
                  function=check_user_group)
 
        m.connect("users_group", "/user_groups/{id}",
 
                  action="show", conditions=dict(method=["GET"]))
 

	
 
        #EXTRAS USER GROUP ROUTES
 
        m.connect("edit_user_group_default_perms", "/user_groups/{id}/edit/default_perms",
 
                  action="edit_default_perms", conditions=dict(method=["GET"]))
 
        m.connect("edit_user_group_default_perms", "/user_groups/{id}/edit/default_perms",
 
                  action="update_default_perms", conditions=dict(method=["PUT"]))
 

	
 

	
 
        m.connect("edit_user_group_perms", "/user_groups/{id}/edit/perms",
 
                  action="edit_perms", conditions=dict(method=["GET"]))
 
        m.connect("edit_user_group_perms", "/user_groups/{id}/edit/perms",
 
                  action="update_perms", conditions=dict(method=["PUT"]))
 
        m.connect("edit_user_group_perms", "/user_groups/{id}/edit/perms",
 
                  action="delete_perms", conditions=dict(method=["DELETE"]))
 

	
 
        m.connect("edit_user_group_advanced", "/user_groups/{id}/edit/advanced",
 
                  action="edit_advanced", conditions=dict(method=["GET"]))
 

	
 
        m.connect("edit_user_group_members", "/user_groups/{id}/edit/members",
 
                  action="edit_members", conditions=dict(method=["GET"]))
 

	
 

	
 

	
 
    #ADMIN PERMISSIONS ROUTES
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/permissions') as m:
 
        m.connect("admin_permissions", "/permissions",
 
                  action="permission_globals", conditions=dict(method=["POST"]))
 
        m.connect("admin_permissions", "/permissions",
 
                  action="permission_globals", conditions=dict(method=["GET"]))
 

	
 
        m.connect("admin_permissions_ips", "/permissions/ips",
 
                  action="permission_ips", conditions=dict(method=["POST"]))
 
        m.connect("admin_permissions_ips", "/permissions/ips",
 
                  action="permission_ips", conditions=dict(method=["GET"]))
 

	
 
        m.connect("admin_permissions_perms", "/permissions/perms",
 
                  action="permission_perms", conditions=dict(method=["POST"]))
 
        m.connect("admin_permissions_perms", "/permissions/perms",
 
                  action="permission_perms", conditions=dict(method=["GET"]))
 

	
 

	
 
    #ADMIN DEFAULTS REST ROUTES
 
    rmap.resource('default', 'defaults',
 
                  controller='admin/defaults', path_prefix=ADMIN_PREFIX)
 

	
 
    #ADMIN AUTH SETTINGS
 
    rmap.connect('auth_settings', '%s/auth' % ADMIN_PREFIX,
 
                 controller='admin/auth_settings', action='auth_settings',
 
                 conditions=dict(method=["POST"]))
 
    rmap.connect('auth_home', '%s/auth' % ADMIN_PREFIX,
 
                 controller='admin/auth_settings')
 

	
 
    #ADMIN SETTINGS ROUTES
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/settings') as m:
 
        m.connect("admin_settings", "/settings",
 
                  action="settings_vcs", conditions=dict(method=["POST"]))
 
        m.connect("admin_settings", "/settings",
 
                  action="settings_vcs", conditions=dict(method=["GET"]))
 

	
 
        m.connect("admin_settings_mapping", "/settings/mapping",
 
                  action="settings_mapping", conditions=dict(method=["POST"]))
 
        m.connect("admin_settings_mapping", "/settings/mapping",
 
                  action="settings_mapping", conditions=dict(method=["GET"]))
 

	
 
        m.connect("admin_settings_global", "/settings/global",
 
                  action="settings_global", conditions=dict(method=["POST"]))
 
        m.connect("admin_settings_global", "/settings/global",
 
                  action="settings_global", conditions=dict(method=["GET"]))
 

	
 
        m.connect("admin_settings_visual", "/settings/visual",
 
                  action="settings_visual", conditions=dict(method=["POST"]))
 
        m.connect("admin_settings_visual", "/settings/visual",
 
                  action="settings_visual", conditions=dict(method=["GET"]))
 

	
 
        m.connect("admin_settings_email", "/settings/email",
 
                  action="settings_email", conditions=dict(method=["POST"]))
 
        m.connect("admin_settings_email", "/settings/email",
 
                  action="settings_email", conditions=dict(method=["GET"]))
 

	
 
        m.connect("admin_settings_hooks", "/settings/hooks",
 
                  action="settings_hooks", conditions=dict(method=["POST"]))
 
        m.connect("admin_settings_hooks", "/settings/hooks",
 
                  action="settings_hooks", conditions=dict(method=["DELETE"]))
 
        m.connect("admin_settings_hooks", "/settings/hooks",
 
                  action="settings_hooks", conditions=dict(method=["GET"]))
 

	
 
        m.connect("admin_settings_search", "/settings/search",
 
                  action="settings_search", conditions=dict(method=["POST"]))
 
        m.connect("admin_settings_search", "/settings/search",
 
                  action="settings_search", conditions=dict(method=["GET"]))
 

	
 
        m.connect("admin_settings_system", "/settings/system",
 
                  action="settings_system", conditions=dict(method=["POST"]))
 
        m.connect("admin_settings_system", "/settings/system",
 
                  action="settings_system", conditions=dict(method=["GET"]))
 
        m.connect("admin_settings_system_update", "/settings/system/updates",
 
                  action="settings_system_update", conditions=dict(method=["GET"]))
 

	
 
    #ADMIN MY ACCOUNT
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/my_account') as m:
 

	
 
        m.connect("my_account", "/my_account",
 
                  action="my_account", conditions=dict(method=["GET"]))
 
        m.connect("my_account", "/my_account",
 
                  action="my_account", conditions=dict(method=["POST"]))
 

	
 
        m.connect("my_account_password", "/my_account/password",
 
                  action="my_account_password", conditions=dict(method=["GET"]))
 
        m.connect("my_account_password", "/my_account/password",
 
                  action="my_account_password", conditions=dict(method=["POST"]))
 

	
 
        m.connect("my_account_repos", "/my_account/repos",
 
                  action="my_account_repos", conditions=dict(method=["GET"]))
 

	
 
        m.connect("my_account_watched", "/my_account/watched",
 
                  action="my_account_watched", conditions=dict(method=["GET"]))
 

	
 
        m.connect("my_account_perms", "/my_account/perms",
 
                  action="my_account_perms", conditions=dict(method=["GET"]))
 

	
 
        m.connect("my_account_emails", "/my_account/emails",
 
                  action="my_account_emails", conditions=dict(method=["GET"]))
 
        m.connect("my_account_emails", "/my_account/emails",
 
                  action="my_account_emails_add", conditions=dict(method=["POST"]))
 
        m.connect("my_account_emails", "/my_account/emails",
 
                  action="my_account_emails_delete", conditions=dict(method=["DELETE"]))
 

	
 
        m.connect("my_account_api_keys", "/my_account/api_keys",
 
                  action="my_account_api_keys", conditions=dict(method=["GET"]))
 
        m.connect("my_account_api_keys", "/my_account/api_keys",
 
                  action="my_account_api_keys_add", conditions=dict(method=["POST"]))
 
        m.connect("my_account_api_keys", "/my_account/api_keys",
 
                  action="my_account_api_keys_delete", conditions=dict(method=["DELETE"]))
 

	
 
    #NOTIFICATION REST ROUTES
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/notifications') as m:
 
        m.connect("notifications", "/notifications",
 
                  action="create", conditions=dict(method=["POST"]))
 
        m.connect("notifications", "/notifications",
 
                  action="index", conditions=dict(method=["GET"]))
 
        m.connect("notifications_mark_all_read", "/notifications/mark_all_read",
 
                  action="mark_all_read", conditions=dict(method=["GET"]))
 
        m.connect("formatted_notifications", "/notifications.{format}",
 
                  action="index", conditions=dict(method=["GET"]))
 
        m.connect("new_notification", "/notifications/new",
 
                  action="new", conditions=dict(method=["GET"]))
 
        m.connect("formatted_new_notification", "/notifications/new.{format}",
 
                  action="new", conditions=dict(method=["GET"]))
 
        m.connect("/notifications/{notification_id}",
 
                  action="update", conditions=dict(method=["PUT"]))
 
        m.connect("/notifications/{notification_id}",
 
                  action="delete", conditions=dict(method=["DELETE"]))
 
        m.connect("edit_notification", "/notifications/{notification_id}/edit",
 
                  action="edit", conditions=dict(method=["GET"]))
 
        m.connect("formatted_edit_notification",
 
                  "/notifications/{notification_id}.{format}/edit",
 
                  action="edit", conditions=dict(method=["GET"]))
 
        m.connect("notification", "/notifications/{notification_id}",
 
                  action="show", conditions=dict(method=["GET"]))
 
        m.connect("formatted_notification", "/notifications/{notification_id}.{format}",
 
                  action="show", conditions=dict(method=["GET"]))
 

	
 
    #ADMIN GIST
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/gists') as m:
 
        m.connect("gists", "/gists",
 
                  action="create", conditions=dict(method=["POST"]))
 
        m.connect("gists", "/gists",
 
                  action="index", conditions=dict(method=["GET"]))
 
        m.connect("new_gist", "/gists/new",
 
                  action="new", conditions=dict(method=["GET"]))
 

	
 

	
 
        m.connect("/gists/{gist_id}",
 
                  action="update", conditions=dict(method=["PUT"]))
 
        m.connect("/gists/{gist_id}",
 
                  action="delete", conditions=dict(method=["DELETE"]))
 
        m.connect("edit_gist", "/gists/{gist_id}/edit",
 
                  action="edit", conditions=dict(method=["GET", "POST"]))
 
        m.connect("edit_gist_check_revision", "/gists/{gist_id}/edit/check_revision",
 
                  action="check_revision", conditions=dict(method=["POST"]))
 

	
 

	
 
        m.connect("gist", "/gists/{gist_id}",
 
                  action="show", conditions=dict(method=["GET"]))
 
        m.connect("gist_rev", "/gists/{gist_id}/{revision}",
 
                  revision="tip",
 
                  action="show", conditions=dict(method=["GET"]))
 
        m.connect("formatted_gist", "/gists/{gist_id}/{revision}/{format}",
 
                  revision="tip",
 
                  action="show", conditions=dict(method=["GET"]))
 
        m.connect("formatted_gist_file", "/gists/{gist_id}/{revision}/{format}/{f_path:.*}",
 
                  revision='tip',
 
                  action="show", conditions=dict(method=["GET"]))
 

	
 
    #ADMIN MAIN PAGES
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/admin') as m:
 
        m.connect('admin_home', '', action='index')
 
        m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}',
 
                  action='add_repo')
 
    #==========================================================================
 
    # API V2
 
    #==========================================================================
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='api/api') as m:
 
        m.connect('api', '/api')
 

	
 
    #USER JOURNAL
 
    rmap.connect('journal', '%s/journal' % ADMIN_PREFIX,
 
                 controller='journal', action='index')
 
    rmap.connect('journal_rss', '%s/journal/rss' % ADMIN_PREFIX,
 
                 controller='journal', action='journal_rss')
 
    rmap.connect('journal_atom', '%s/journal/atom' % ADMIN_PREFIX,
 
                 controller='journal', action='journal_atom')
 

	
 
    rmap.connect('public_journal', '%s/public_journal' % ADMIN_PREFIX,
 
                 controller='journal', action="public_journal")
 

	
 
    rmap.connect('public_journal_rss', '%s/public_journal/rss' % ADMIN_PREFIX,
 
                 controller='journal', action="public_journal_rss")
 

	
 
    rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % ADMIN_PREFIX,
 
                 controller='journal', action="public_journal_rss")
 

	
 
    rmap.connect('public_journal_atom',
 
                 '%s/public_journal/atom' % ADMIN_PREFIX, controller='journal',
 
                 action="public_journal_atom")
 

	
 
    rmap.connect('public_journal_atom_old',
 
                 '%s/public_journal_atom' % ADMIN_PREFIX, controller='journal',
 
                 action="public_journal_atom")
 

	
 
    rmap.connect('toggle_following', '%s/toggle_following' % ADMIN_PREFIX,
 
                 controller='journal', action='toggle_following',
 
                 conditions=dict(method=["POST"]))
 

	
 
    #SEARCH
 
    rmap.connect('search', '%s/search' % ADMIN_PREFIX, controller='search',)
 
    rmap.connect('search_repo_admin', '%s/search/{repo_name:.*}' % ADMIN_PREFIX,
 
                 controller='search',
 
                 conditions=dict(function=check_repo))
 
    rmap.connect('search_repo', '/{repo_name:.*?}/search',
 
                 controller='search',
 
                 conditions=dict(function=check_repo),
 
                 )
 

	
 
    #LOGIN/LOGOUT/REGISTER/SIGN IN
 
    rmap.connect('authentication_token', '%s/authentication_token' % ADMIN_PREFIX, controller='login', action='authentication_token')
 
    rmap.connect('login_home', '%s/login' % ADMIN_PREFIX, controller='login')
 
    rmap.connect('logout_home', '%s/logout' % ADMIN_PREFIX, controller='login',
 
                 action='logout')
 

	
 
    rmap.connect('register', '%s/register' % ADMIN_PREFIX, controller='login',
 
                 action='register')
 

	
 
    rmap.connect('reset_password', '%s/password_reset' % ADMIN_PREFIX,
 
                 controller='login', action='password_reset')
 

	
 
    rmap.connect('reset_password_confirmation',
 
                 '%s/password_reset_confirmation' % ADMIN_PREFIX,
 
                 controller='login', action='password_reset_confirmation')
 

	
 
    #FEEDS
 
    rmap.connect('rss_feed_home', '/{repo_name:.*?}/feed/rss',
 
                controller='feed', action='rss',
 
                conditions=dict(function=check_repo))
 

	
 
    rmap.connect('atom_feed_home', '/{repo_name:.*?}/feed/atom',
 
                controller='feed', action='atom',
 
                conditions=dict(function=check_repo))
 

	
 
    #==========================================================================
 
    # REPOSITORY ROUTES
 
    #==========================================================================
 
    rmap.connect('repo_creating_home', '/{repo_name:.*?}/repo_creating',
 
                controller='admin/repos', action='repo_creating')
 
    rmap.connect('repo_check_home', '/{repo_name:.*?}/crepo_check',
 
                controller='admin/repos', action='repo_check')
 

	
 
    rmap.connect('summary_home', '/{repo_name:.*?}',
 
                controller='summary',
 
                conditions=dict(function=check_repo))
 

	
 
    # must be here for proper group/repo catching
 
    rmap.connect('repos_group_home', '/{group_name:.*}',
 
                controller='admin/repo_groups', action="show_by_name",
 
                conditions=dict(function=check_group))
 
    rmap.connect('repo_stats_home', '/{repo_name:.*?}/statistics',
 
                controller='summary', action='statistics',
 
                conditions=dict(function=check_repo))
 

	
 
    rmap.connect('repo_size', '/{repo_name:.*?}/repo_size',
 
                controller='summary', action='repo_size',
 
                conditions=dict(function=check_repo))
 

	
 
    rmap.connect('branch_tag_switcher', '/{repo_name:.*?}/branches-tags',
 
                 controller='home', action='branch_tag_switcher')
 
    rmap.connect('repo_refs_data', '/{repo_name:.*?}/refs-data',
 
                 controller='home', action='repo_refs_data')
 

	
 
    rmap.connect('changeset_home', '/{repo_name:.*?}/changeset/{revision:.*}',
 
                controller='changeset', revision='tip',
 
                conditions=dict(function=check_repo))
 
    rmap.connect('changeset_children', '/{repo_name:.*?}/changeset_children/{revision}',
 
                controller='changeset', revision='tip', action="changeset_children",
 
                conditions=dict(function=check_repo))
 
    rmap.connect('changeset_parents', '/{repo_name:.*?}/changeset_parents/{revision}',
 
                controller='changeset', revision='tip', action="changeset_parents",
 
                conditions=dict(function=check_repo))
 

	
 
    # repo edit options
 
    rmap.connect("edit_repo", "/{repo_name:.*?}/settings",
 
                 controller='admin/repos', action="edit",
 
                 conditions=dict(method=["GET"], function=check_repo))
 

	
 
    rmap.connect("edit_repo_perms", "/{repo_name:.*?}/settings/permissions",
 
                 controller='admin/repos', action="edit_permissions",
 
                 conditions=dict(method=["GET"], function=check_repo))
 
    rmap.connect("edit_repo_perms_update", "/{repo_name:.*?}/settings/permissions",
 
                 controller='admin/repos', action="edit_permissions_update",
 
                 conditions=dict(method=["PUT"], function=check_repo))
 
    rmap.connect("edit_repo_perms_revoke", "/{repo_name:.*?}/settings/permissions",
 
                 controller='admin/repos', action="edit_permissions_revoke",
 
                 conditions=dict(method=["DELETE"], function=check_repo))
 

	
 
    rmap.connect("edit_repo_fields", "/{repo_name:.*?}/settings/fields",
 
                 controller='admin/repos', action="edit_fields",
 
                 conditions=dict(method=["GET"], function=check_repo))
 
    rmap.connect('create_repo_fields', "/{repo_name:.*?}/settings/fields/new",
 
                 controller='admin/repos', action="create_repo_field",
 
                 conditions=dict(method=["PUT"], function=check_repo))
 
    rmap.connect('delete_repo_fields', "/{repo_name:.*?}/settings/fields/{field_id}",
 
                 controller='admin/repos', action="delete_repo_field",
 
                 conditions=dict(method=["DELETE"], function=check_repo))
 

	
 

	
 
    rmap.connect("edit_repo_advanced", "/{repo_name:.*?}/settings/advanced",
 
                 controller='admin/repos', action="edit_advanced",
 
                 conditions=dict(method=["GET"], function=check_repo))
 

	
 
    rmap.connect("edit_repo_advanced_locking", "/{repo_name:.*?}/settings/advanced/locking",
 
                 controller='admin/repos', action="edit_advanced_locking",
 
                 conditions=dict(method=["PUT"], function=check_repo))
 
    rmap.connect('toggle_locking', "/{repo_name:.*?}/settings/advanced/locking_toggle",
 
                 controller='admin/repos', action="toggle_locking",
 
                 conditions=dict(method=["GET"], function=check_repo))
 

	
 
    rmap.connect("edit_repo_advanced_journal", "/{repo_name:.*?}/settings/advanced/journal",
 
                 controller='admin/repos', action="edit_advanced_journal",
 
                 conditions=dict(method=["PUT"], function=check_repo))
 

	
 
    rmap.connect("edit_repo_advanced_fork", "/{repo_name:.*?}/settings/advanced/fork",
 
                 controller='admin/repos', action="edit_advanced_fork",
 
                 conditions=dict(method=["PUT"], function=check_repo))
 

	
 

	
 
    rmap.connect("edit_repo_caches", "/{repo_name:.*?}/settings/caches",
 
                 controller='admin/repos', action="edit_caches",
 
                 conditions=dict(method=["GET"], function=check_repo))
 
    rmap.connect("edit_repo_caches", "/{repo_name:.*?}/settings/caches",
 
                 controller='admin/repos', action="edit_caches",
 
                 conditions=dict(method=["PUT"], function=check_repo))
 

	
 

	
 
    rmap.connect("edit_repo_remote", "/{repo_name:.*?}/settings/remote",
 
                 controller='admin/repos', action="edit_remote",
 
                 conditions=dict(method=["GET"], function=check_repo))
 
    rmap.connect("edit_repo_remote", "/{repo_name:.*?}/settings/remote",
 
                 controller='admin/repos', action="edit_remote",
 
                 conditions=dict(method=["PUT"], function=check_repo))
 

	
 
    rmap.connect("edit_repo_statistics", "/{repo_name:.*?}/settings/statistics",
 
                 controller='admin/repos', action="edit_statistics",
 
                 conditions=dict(method=["GET"], function=check_repo))
 
    rmap.connect("edit_repo_statistics", "/{repo_name:.*?}/settings/statistics",
 
                 controller='admin/repos', action="edit_statistics",
 
                 conditions=dict(method=["PUT"], function=check_repo))
 

	
 
    # still-working URL, kept for backward compatibility
 
    rmap.connect('raw_changeset_home_depraced',
 
                 '/{repo_name:.*?}/raw-changeset/{revision}',
 
                 controller='changeset', action='changeset_raw',
 
                 revision='tip', conditions=dict(function=check_repo))
 

	
 
    ## new URLs
 
    rmap.connect('changeset_raw_home',
 
                 '/{repo_name:.*?}/changeset-diff/{revision}',
 
                 controller='changeset', action='changeset_raw',
 
                 revision='tip', conditions=dict(function=check_repo))
 

	
 
    rmap.connect('changeset_patch_home',
 
                 '/{repo_name:.*?}/changeset-patch/{revision}',
 
                 controller='changeset', action='changeset_patch',
 
                 revision='tip', conditions=dict(function=check_repo))
 

	
 
    rmap.connect('changeset_download_home',
 
                 '/{repo_name:.*?}/changeset-download/{revision}',
 
                 controller='changeset', action='changeset_download',
 
                 revision='tip', conditions=dict(function=check_repo))
 

	
 
    rmap.connect('changeset_comment',
 
                 '/{repo_name:.*?}/changeset-comment/{revision}',
 
                controller='changeset', revision='tip', action='comment',
 
                conditions=dict(function=check_repo))
 

	
 
    rmap.connect('changeset_comment_preview',
 
                 '/{repo_name:.*?}/changeset-comment-preview',
 
                controller='changeset', action='preview_comment',
 
                conditions=dict(function=check_repo, method=["POST"]))
 

	
 
    rmap.connect('changeset_comment_delete',
 
                 '/{repo_name:.*?}/changeset-comment-delete/{comment_id}',
 
                controller='changeset', action='delete_comment',
 
                conditions=dict(function=check_repo, method=["DELETE"]))
 

	
 
    rmap.connect('changeset_info', '/changeset_info/{repo_name:.*?}/{revision}',
 
                 controller='changeset', action='changeset_info')
 

	
 
    rmap.connect('compare_home',
 
                 '/{repo_name:.*?}/compare',
 
                 controller='compare', action='index',
 
                 conditions=dict(function=check_repo))
 

	
 
    rmap.connect('compare_url',
 
                 '/{repo_name:.*?}/compare/{org_ref_type}@{org_ref_name:.*?}...{other_ref_type}@{other_ref_name:.*?}',
 
                 controller='compare', action='compare',
 
                 conditions=dict(function=check_repo),
 
                 requirements=dict(
 
                            org_ref_type='(branch|book|tag|rev|__other_ref_type__)',
 
                            other_ref_type='(branch|book|tag|rev|__org_ref_type__)')
 
                 )
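    # Illustrative example (not in the original file): a URL such as
    #   /myrepo/compare/branch@default...branch@stable
    # matches compare_url with org_ref_type='branch', org_ref_name='default',
    # other_ref_type='branch' and other_ref_name='stable'.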
 

	
 
    rmap.connect('pullrequest_home',
 
                 '/{repo_name:.*?}/pull-request/new', controller='pullrequests',
 
                 action='index', conditions=dict(function=check_repo,
 
                                                 method=["GET"]))
 

	
 
    rmap.connect('pullrequest_repo_info',
 
                 '/{repo_name:.*?}/pull-request-repo-info',
 
                 controller='pullrequests', action='repo_info',
 
                 conditions=dict(function=check_repo, method=["GET"]))
 

	
 
    rmap.connect('pullrequest',
 
                 '/{repo_name:.*?}/pull-request/new', controller='pullrequests',
 
                 action='create', conditions=dict(function=check_repo,
 
                                                  method=["POST"]))
 

	
 
    rmap.connect('pullrequest_show',
 
                 '/{repo_name:.*?}/pull-request/{pull_request_id:\\d+}{extra:(/.*)?}', extra='',
 
                 controller='pullrequests',
 
                 action='show', conditions=dict(function=check_repo,
 
                                                method=["GET"]))
 
    rmap.connect('pullrequest_post',
 
                 '/{repo_name:.*?}/pull-request/{pull_request_id}',
 
                 controller='pullrequests',
 
                 action='post', conditions=dict(function=check_repo,
 
                                                method=["POST"]))
 
    rmap.connect('pullrequest_delete',
 
                 '/{repo_name:.*?}/pull-request/{pull_request_id}',
 
                 controller='pullrequests',
 
                 action='delete', conditions=dict(function=check_repo,
 
                                                method=["DELETE"]))
 

	
 
    rmap.connect('pullrequest_show_all',
 
                 '/{repo_name:.*?}/pull-request',
 
                 controller='pullrequests',
 
                 action='show_all', conditions=dict(function=check_repo,
 
                                                method=["GET"]))
 

	
 
    rmap.connect('my_pullrequests',
 
                 '/my_pullrequests',
 
                 controller='pullrequests',
 
                 action='show_my', conditions=dict(method=["GET"]))
 

	
 
    rmap.connect('pullrequest_comment',
 
                 '/{repo_name:.*?}/pull-request-comment/{pull_request_id}',
 
                 controller='pullrequests',
 
                 action='comment', conditions=dict(function=check_repo,
 
                                                method=["POST"]))
 

	
 
    rmap.connect('pullrequest_comment_delete',
 
                 '/{repo_name:.*?}/pull-request-comment/{comment_id}/delete',
 
                controller='pullrequests', action='delete_comment',
 
                conditions=dict(function=check_repo, method=["DELETE"]))
 

	
 
    rmap.connect('summary_home_summary', '/{repo_name:.*?}/summary',
 
                controller='summary', conditions=dict(function=check_repo))
 

	
 
    rmap.connect('branches_home', '/{repo_name:.*?}/branches',
 
                controller='branches', conditions=dict(function=check_repo))
 

	
 
    rmap.connect('tags_home', '/{repo_name:.*?}/tags',
 
                controller='tags', conditions=dict(function=check_repo))
 

	
 
    rmap.connect('bookmarks_home', '/{repo_name:.*?}/bookmarks',
 
                controller='bookmarks', conditions=dict(function=check_repo))
 

	
 
    rmap.connect('changelog_home', '/{repo_name:.*?}/changelog',
 
                controller='changelog', conditions=dict(function=check_repo))
 

	
 
    rmap.connect('changelog_summary_home', '/{repo_name:.*?}/changelog_summary',
 
                controller='changelog', action='changelog_summary',
 
                conditions=dict(function=check_repo))
 

	
 
    rmap.connect('changelog_file_home', '/{repo_name:.*?}/changelog/{revision}/{f_path:.*}',
 
                controller='changelog', f_path=None,
 
                conditions=dict(function=check_repo))
 

	
 
    rmap.connect('changelog_details', '/{repo_name:.*?}/changelog_details/{cs}',
 
                controller='changelog', action='changelog_details',
 
                conditions=dict(function=check_repo))
 

	
 
    rmap.connect('files_home', '/{repo_name:.*?}/files/{revision}/{f_path:.*}',
 
                controller='files', revision='tip', f_path='',
 
                conditions=dict(function=check_repo))
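 
    # Illustrative match for 'files_home' (example path): GET
 
    # /myrepo/files/tip/docs/README.rst dispatches to the files
 
    # controller's default index action with revision='tip' and
 
    # f_path='docs/README.rst'; both parts fall back to the defaults
 
    # given above when omitted.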
 

	
 
    rmap.connect('files_home_nopath', '/{repo_name:.*?}/files/{revision}',
 
                controller='files', revision='tip', f_path='',
 
                conditions=dict(function=check_repo))
 

	
 
    rmap.connect('files_history_home',
 
                 '/{repo_name:.*?}/history/{revision}/{f_path:.*}',
 
                 controller='files', action='history', revision='tip', f_path='',
 
                 conditions=dict(function=check_repo))
 

	
 
    rmap.connect('files_authors_home',
 
                 '/{repo_name:.*?}/authors/{revision}/{f_path:.*}',
 
                 controller='files', action='authors', revision='tip', f_path='',
 
                 conditions=dict(function=check_repo))
 

	
 
    rmap.connect('files_diff_home', '/{repo_name:.*?}/diff/{f_path:.*}',
 
                controller='files', action='diff', revision='tip', f_path='',
 
                conditions=dict(function=check_repo))
 

	
 
    rmap.connect('files_diff_2way_home', '/{repo_name:.*?}/diff-2way/{f_path:.+}',
 
                controller='files', action='diff_2way', revision='tip', f_path='',
 
                conditions=dict(function=check_repo))
 

	
 
    rmap.connect('files_rawfile_home',
kallithea/controllers/files.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.files
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Files controller for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 21, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 
import os
 
import logging
 
import traceback
 
import tempfile
 
import shutil
 

	
 
from pylons import request, response, tmpl_context as c, url
 
from pylons.i18n.translation import _
 
from pylons.controllers.util import redirect
 
from kallithea.lib.utils import jsonify, action_logger
 

	
 
from kallithea.lib import diffs
 
from kallithea.lib import helpers as h
 

	
 
from kallithea.lib.compat import OrderedDict
 
from kallithea.lib.utils2 import convert_line_endings, detect_mode, safe_str,\
 
    str2bool
 
from kallithea.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
 
from kallithea.lib.base import BaseRepoController, render
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 
from kallithea.lib.vcs.conf import settings
 
from kallithea.lib.vcs.exceptions import RepositoryError, \
 
    ChangesetDoesNotExistError, EmptyRepositoryError, \
 
    ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,\
 
    NodeDoesNotExistError, ChangesetError, NodeError
 
from kallithea.lib.vcs.nodes import FileNode
 

	
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.scm import ScmModel
 
from kallithea.model.db import Repository
 

	
 
from kallithea.controllers.changeset import anchor_url, _ignorews_url,\
 
    _context_url, get_line_ctx, get_ignore_ws
 
from webob.exc import HTTPNotFound
 
from kallithea.lib.exceptions import NonRelativePathError
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class FilesController(BaseRepoController):
 

	
 
    def __before__(self):
 
        super(FilesController, self).__before__()
 
        c.cut_off_limit = self.cut_off_limit
 

	
 
    def __get_cs(self, rev, silent_empty=False):
 
        """
 
        Safe way to get a changeset: if an error occurs, redirect to tip
 
        with a proper message.
 

	
 
        :param rev: revision to fetch
 
        :param silent_empty: return None if the repository is empty
 
        """
 

	
 
        try:
 
            return c.db_repo_scm_instance.get_changeset(rev)
 
        except EmptyRepositoryError as e:
 
            if silent_empty:
 
                return None
 
            url_ = url('files_add_home',
 
                       repo_name=c.repo_name,
 
                       revision=0, f_path='', anchor='edit')
 
            add_new = h.link_to(_('Click here to add new file'), url_, class_="alert-link")
 
            h.flash(h.literal(_('There are no files yet. %s') % add_new),
 
                    category='warning')
 
            raise HTTPNotFound()
 
        except (ChangesetDoesNotExistError, LookupError) as e:
 
            msg = _('Such revision does not exist for this repository')
 
            h.flash(msg, category='error')
 
            raise HTTPNotFound()
 
        except RepositoryError as e:
 
            h.flash(safe_str(e), category='error')
 
            raise HTTPNotFound()
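 
    # Usage sketch for __get_cs (the revision value is illustrative):
 
    #   cs = self.__get_cs('tip')                     # flashes and 404s on error
 
    #   cs = self.__get_cs('tip', silent_empty=True)  # None if repo is empty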
 

	
 
    def __get_filenode(self, cs, path):
 
        """
 
        Returns the file node or raises an HTTP error.
 

	
 
        :param cs: given changeset
 
        :param path: path to lookup
 
        """
 

	
 
        try:
 
            file_node = cs.get_node(path)
 
            if file_node.is_dir():
 
                raise RepositoryError('given path is a directory')
 
        except ChangesetDoesNotExistError as e:
 
            msg = _('Such revision does not exist for this repository')
 
            h.flash(msg, category='error')
 
            raise HTTPNotFound()
 
        except RepositoryError as e:
 
            h.flash(safe_str(e), category='error')
 
            raise HTTPNotFound()
 

	
 
        return file_node
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def index(self, repo_name, revision, f_path, annotate=False):
 
        # redirect to given revision from form if given
 
        post_revision = request.POST.get('at_rev', None)
 
        if post_revision:
 
            cs = self.__get_cs(post_revision) # FIXME - unused!
 

	
 
        c.revision = revision
 
        c.changeset = self.__get_cs(revision)
 
        c.branch = request.GET.get('branch', None)
 
        c.f_path = f_path
 
        c.annotate = annotate
 
        cur_rev = c.changeset.revision
 

	
 
        # prev link
 
        try:
 
            prev_rev = c.db_repo_scm_instance.get_changeset(cur_rev).prev(c.branch)
 
            c.url_prev = url('files_home', repo_name=c.repo_name,
 
                         revision=prev_rev.raw_id, f_path=f_path)
 
            if c.branch:
 
                c.url_prev += '?branch=%s' % c.branch
 
        except (ChangesetDoesNotExistError, VCSError):
 
            c.url_prev = '#'
 

	
 
        # next link
 
        try:
 
            next_rev = c.db_repo_scm_instance.get_changeset(cur_rev).next(c.branch)
 
            c.url_next = url('files_home', repo_name=c.repo_name,
 
                     revision=next_rev.raw_id, f_path=f_path)
 
            if c.branch:
 
                c.url_next += '?branch=%s' % c.branch
 
        except (ChangesetDoesNotExistError, VCSError):
 
            c.url_next = '#'
 

	
 
        # files or dirs
 
        try:
 
            c.file = c.changeset.get_node(f_path)
 

	
 
            if c.file.is_file():
 
                c.load_full_history = False
 
                file_last_cs = c.file.last_changeset
 
                c.file_changeset = (c.changeset
 
                                    if c.changeset.revision < file_last_cs.revision
 
                                    else file_last_cs)
 
                # determine if we're on a branch head
 
                _branches = c.db_repo_scm_instance.branches
 
                c.on_branch_head = revision in _branches.keys() + _branches.values()
 
                _hist = []
 
                c.file_history = []
 
                if c.load_full_history:
 
                    c.file_history, _hist = self._get_node_history(c.changeset, f_path)
 

	
 
                c.authors = []
 
                for a in set([x.author for x in _hist]):
 
                    c.authors.append((h.email(a), h.person(a)))
 
            else:
 
                c.authors = c.file_history = []
 
        except RepositoryError as e:
 
            h.flash(safe_str(e), category='error')
 
            raise HTTPNotFound()
 

	
 
        if request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            return render('files/files_ypjax.html')
 

	
 
        # TODO: tags and bookmarks?
 
        c.revision_options = [(c.changeset.raw_id,
 
                              _('%s at %s') % (c.changeset.branch, h.short_id(c.changeset.raw_id)))] + \
 
            [(n, b) for b, n in c.db_repo_scm_instance.branches.items()]
 
        if c.db_repo_scm_instance.closed_branches:
 
            prefix = _('(closed)') + ' '
 
            c.revision_options += [('-', '-')] + \
 
                [(n, prefix + b) for b, n in c.db_repo_scm_instance.closed_branches.items()]
 

	
 
        return render('files/files.html')
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def history(self, repo_name, revision, f_path):
 
        changeset = self.__get_cs(revision)
 
        _file = changeset.get_node(f_path)
 
        if _file.is_file():
 
            file_history, _hist = self._get_node_history(changeset, f_path)
 

	
 
            res = []
 
            for obj in file_history:
 
                res.append({
 
                    'text': obj[1],
 
                    'children': [{'id': o[0], 'text': o[1]} for o in obj[0]]
 
                })
 

	
 
            data = {
 
                'more': False,
 
                'results': res
 
            }
 
            return data
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def authors(self, repo_name, revision, f_path):
 
        changeset = self.__get_cs(revision)
 
        _file = changeset.get_node(f_path)
 
        if _file.is_file():
 
            file_history, _hist = self._get_node_history(changeset, f_path)
 
            c.authors = []
 
            for a in set([x.author for x in _hist]):
 
                c.authors.append((h.email(a), h.person(a)))
 
            return render('files/files_history_box.html')
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def rawfile(self, repo_name, revision, f_path):
 
        cs = self.__get_cs(revision)
 
        file_node = self.__get_filenode(cs, f_path)
 

	
 
        response.content_disposition = 'attachment; filename=%s' % \
 
            safe_str(f_path.split(Repository.url_sep())[-1])
 

	
 
        response.content_type = file_node.mimetype
 
        return file_node.content
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def raw(self, repo_name, revision, f_path):
 
        cs = self.__get_cs(revision)
 
        file_node = self.__get_filenode(cs, f_path)
 

	
 
        raw_mimetype_mapping = {
 
            # map original mimetype to a mimetype used for "show as raw"
 
            # you can also provide a content-disposition to override the
 
            # default "attachment" disposition.
 
            # orig_type: (new_type, new_dispo)
 

	
 
            # show images inline:
 
            'image/x-icon': ('image/x-icon', 'inline'),
 
            'image/png': ('image/png', 'inline'),
 
            'image/gif': ('image/gif', 'inline'),
 
            'image/jpeg': ('image/jpeg', 'inline'),
 
            'image/svg+xml': ('image/svg+xml', 'inline'),
 
        }
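 
        # Illustrative lookups against the mapping above:
 
        #   raw_mimetype_mapping['image/png'] -> ('image/png', 'inline')
 
        #   'text/html' is not listed, so the KeyError branch below
 
        #   decides: binary -> ('application/octet-stream', 'attachment'),
 
        #   text -> ('text/plain', 'inline')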
 

	
 
        mimetype = file_node.mimetype
 
        try:
 
            mimetype, dispo = raw_mimetype_mapping[mimetype]
 
        except KeyError:
 
            # we don't know anything special about this, handle it safely
 
            if file_node.is_binary:
 
                # do same as download raw for binary files
 
                mimetype, dispo = 'application/octet-stream', 'attachment'
 
            else:
 
                # do not just use the original mimetype, but force text/plain,
 
                # otherwise it would serve text/html and that might be unsafe.
 
                # Note: underlying vcs library fakes text/plain mimetype if the
 
                # mimetype can not be determined and it thinks it is not
 
                # binary. This might lead to erroneous text display in some
 
                # cases, but helps in other cases, like with text files
 
                # without extension.
 
                mimetype, dispo = 'text/plain', 'inline'
 

	
 
        if dispo == 'attachment':
 
            dispo = 'attachment; filename=%s' % \
 
                        safe_str(f_path.split(os.sep)[-1])
 

	
 
        response.content_disposition = dispo
 
        response.content_type = mimetype
 
        return file_node.content
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
 
    def delete(self, repo_name, revision, f_path):
 
        repo = c.db_repo
 
        if repo.enable_locking and repo.locked[0]:
 
            h.flash(_('This repository has been locked by %s on %s')
 
                % (h.person_by_id(repo.locked[0]),
 
                   h.fmt_date(h.time_to_datetime(repo.locked[1]))),
 
                'warning')
 
            return redirect(h.url('files_home',
 
                                  repo_name=repo_name, revision='tip'))
 

	
 
        # check if revision is a branch identifier - basically we cannot
 
        # create multiple heads via file editing
 
        _branches = repo.scm_instance.branches
 
        # check if revision is a branch name or branch hash
 
        if revision not in _branches.keys() + _branches.values():
 
            h.flash(_('You can only delete files with revision '
 
                      'being a valid branch'), category='warning')
 
            return redirect(h.url('files_home',
 
                                  repo_name=repo_name, revision='tip',
 
                                  f_path=f_path))
 

	
 
        r_post = request.POST
 

	
 
        c.cs = self.__get_cs(revision)
 
        c.file = self.__get_filenode(c.cs, f_path)
 

	
 
        c.default_message = _('Deleted file %s via Kallithea') % (f_path)
 
        c.f_path = f_path
 
        node_path = f_path
 
        author = self.authuser.full_contact
 

	
 
        if r_post:
 
            message = r_post.get('message') or c.default_message
 

	
 
            try:
 
                nodes = {
 
                    node_path: {
 
                        'content': ''
 
                    }
 
                }
 
                self.scm_model.delete_nodes(
 
                    user=c.authuser.user_id, repo=c.db_repo,
 
                    message=message,
 
                    nodes=nodes,
 
                    parent_cs=c.cs,
 
                    author=author,
 
                )
 

	
 
                h.flash(_('Successfully deleted file %s') % f_path,
 
                        category='success')
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                h.flash(_('Error occurred during commit'), category='error')
 
            return redirect(url('changeset_home',
 
                                repo_name=c.repo_name, revision='tip'))
 

	
 
        return render('files/files_delete.html')
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
 
    def edit(self, repo_name, revision, f_path):
 
        repo = c.db_repo
 
        if repo.enable_locking and repo.locked[0]:
 
            h.flash(_('This repository has been locked by %s on %s')
 
                % (h.person_by_id(repo.locked[0]),
 
                   h.fmt_date(h.time_to_datetime(repo.locked[1]))),
 
                'warning')
 
            return redirect(h.url('files_home',
 
                                  repo_name=repo_name, revision='tip'))
 

	
 
        # check if revision is a branch identifier- basically we cannot
 
        # create multiple heads via file editing
 
        _branches = repo.scm_instance.branches
 
        # check if revision is a branch name or branch hash
 
        if revision not in _branches.keys() + _branches.values():
 
            h.flash(_('You can only edit files with revision '
 
                      'being a valid branch'), category='warning')
 
            return redirect(h.url('files_home',
 
                                  repo_name=repo_name, revision='tip',
 
                                  f_path=f_path))
 

	
 
        r_post = request.POST
 

	
 
        c.cs = self.__get_cs(revision)
 
        c.file = self.__get_filenode(c.cs, f_path)
 

	
 
        if c.file.is_binary:
 
            return redirect(url('files_home', repo_name=c.repo_name,
 
                            revision=c.cs.raw_id, f_path=f_path))
 
        c.default_message = _('Edited file %s via Kallithea') % (f_path)
 
        c.f_path = f_path
 

	
 
        if r_post:
 

	
 
            old_content = c.file.content
 
            sl = old_content.splitlines(1)
 
            first_line = sl[0] if sl else ''
 
            # modes:  0 - Unix, 1 - Mac, 2 - DOS
 
            mode = detect_mode(first_line, 0)
 
            content = convert_line_endings(r_post.get('content', ''), mode)
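 
            # Sketch of the normalization above, assuming detect_mode maps
 
            # line endings per the mode comment ('\n' -> 0, '\r' -> 1,
 
            # '\r\n' -> 2): if the first line of the old content ends in
 
            # '\r\n', the posted content is converted to DOS endings too.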
 

	
 
            message = r_post.get('message') or c.default_message
 
            author = self.authuser.full_contact
 

	
 
            if content == old_content:
 
                h.flash(_('No changes'), category='warning')
 
                return redirect(url('changeset_home', repo_name=c.repo_name,
 
                                    revision='tip'))
 
            try:
 
                self.scm_model.commit_change(repo=c.db_repo_scm_instance,
 
                                             repo_name=repo_name, cs=c.cs,
 
                                             user=self.authuser.user_id,
 
                                             author=author, message=message,
 
                                             content=content, f_path=f_path)
 
                h.flash(_('Successfully committed to %s') % f_path,
 
                        category='success')
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                h.flash(_('Error occurred during commit'), category='error')
 
            return redirect(url('changeset_home',
 
                                repo_name=c.repo_name, revision='tip'))
 

	
 
        return render('files/files_edit.html')
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
 
    def add(self, repo_name, revision, f_path):
 

	
 
        repo = Repository.get_by_repo_name(repo_name)
 
        if repo.enable_locking and repo.locked[0]:
 
            h.flash(_('This repository has been locked by %s on %s')
 
                % (h.person_by_id(repo.locked[0]),
 
                   h.fmt_date(h.time_to_datetime(repo.locked[1]))),
 
                  'warning')
 
            return redirect(h.url('files_home',
 
                                  repo_name=repo_name, revision='tip'))
 

	
 
        r_post = request.POST
 
        c.cs = self.__get_cs(revision, silent_empty=True)
 
        if c.cs is None:
 
            c.cs = EmptyChangeset(alias=c.db_repo_scm_instance.alias)
 
        c.default_message = (_('Added file via Kallithea'))
 
        c.f_path = f_path
 

	
 
        if r_post:
 
            unix_mode = 0
 
            content = convert_line_endings(r_post.get('content', ''), unix_mode)
 

	
 
            message = r_post.get('message') or c.default_message
 
            filename = r_post.get('filename')
 
            location = r_post.get('location', '')
 
            file_obj = r_post.get('upload_file', None)
 

	
 
            if file_obj is not None and hasattr(file_obj, 'filename'):
 
                filename = file_obj.filename
 
                content = file_obj.file
 

	
 
                if hasattr(content, 'file'):
 
                    # non-POSIX systems store the real file under the file attr
 
                    content = content.file
 

	
 
            if not content:
 
                h.flash(_('No content'), category='warning')
 
                return redirect(url('changeset_home', repo_name=c.repo_name,
 
                                    revision='tip'))
 
            if not filename:
 
                h.flash(_('No filename'), category='warning')
 
                return redirect(url('changeset_home', repo_name=c.repo_name,
 
                                    revision='tip'))
 
            # strip all path components, keep just the basename
 
            filename = os.path.basename(filename)
 
            node_path = os.path.join(location, filename)
 
            author = self.authuser.full_contact
 

	
 
            try:
 
                nodes = {
 
                    node_path: {
 
                        'content': content
 
                    }
 
                }
 
                self.scm_model.create_nodes(
 
                    user=c.authuser.user_id, repo=c.db_repo,
 
                    message=message,
 
                    nodes=nodes,
 
                    parent_cs=c.cs,
 
                    author=author,
 
                )
 

	
 
                h.flash(_('Successfully committed to %s') % node_path,
 
                        category='success')
 
            except NonRelativePathError as e:
 
                h.flash(_('Location must be relative path and must not '
 
                          'contain .. in path'), category='warning')
 
                return redirect(url('changeset_home', repo_name=c.repo_name,
 
                                    revision='tip'))
 
            except (NodeError, NodeAlreadyExistsError) as e:
 
                h.flash(_(e), category='error')
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                h.flash(_('Error occurred during commit'), category='error')
 
            return redirect(url('changeset_home',
 
                                repo_name=c.repo_name, revision='tip'))
 

	
 
        return render('files/files_add.html')
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def archivefile(self, repo_name, fname):
 
        fileformat = None
 
        revision = None
 
        ext = None
 
        subrepos = request.GET.get('subrepos') == 'true'
 

	
 
        for a_type, ext_data in settings.ARCHIVE_SPECS.items():
 
            archive_spec = fname.split(ext_data[1])
 
            if len(archive_spec) == 2 and archive_spec[1] == '':
 
                fileformat = a_type or ext_data[1]
 
                revision = archive_spec[0]
 
                ext = ext_data[1]
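 
                # Illustrative split (example values): fname
 
                # 'deadbeef.tar.gz' with an ARCHIVE_SPECS ext of '.tar.gz'
 
                # yields revision 'deadbeef' and ext '.tar.gz'.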
 

	
 
        try:
 
            dbrepo = RepoModel().get_by_repo_name(repo_name)
 
            if not dbrepo.enable_downloads:
 
                return _('Downloads disabled') # TODO: do something else?
 

	
 
            if c.db_repo_scm_instance.alias == 'hg':
 
                # patch and reset hooks section of UI config to not run any
 
                # hooks on fetching archives with subrepos
 
                for k, v in c.db_repo_scm_instance._repo.ui.configitems('hooks'):
 
                    c.db_repo_scm_instance._repo.ui.setconfig('hooks', k, None)
 

	
 
            cs = c.db_repo_scm_instance.get_changeset(revision)
 
            content_type = settings.ARCHIVE_SPECS[fileformat][0]
 
        except ChangesetDoesNotExistError:
 
            return _('Unknown revision %s') % revision
 
        except EmptyRepositoryError:
 
            return _('Empty repository')
 
        except (ImproperArchiveTypeError, KeyError):
 
            return _('Unknown archive type')
 

	
 
        from kallithea import CONFIG
 
        rev_name = cs.raw_id[:12]
 
        archive_name = '%s-%s%s' % (safe_str(repo_name.replace('/', '_')),
 
                                    safe_str(rev_name), ext)
 

	
 
        archive_path = None
 
        cached_archive_path = None
 
        archive_cache_dir = CONFIG.get('archive_cache_dir')
 
        if archive_cache_dir and not subrepos: # TODO: subrepo caching?
 
            if not os.path.isdir(archive_cache_dir):
 
                os.makedirs(archive_cache_dir)
 
            cached_archive_path = os.path.join(archive_cache_dir, archive_name)
 
            if os.path.isfile(cached_archive_path):
 
                log.debug('Found cached archive in %s', cached_archive_path)
 
                archive_path = cached_archive_path
 
            else:
 
                log.debug('Archive %s is not yet cached', archive_name)
 

	
 
        if archive_path is None:
 
            # generate new archive
 
            fd, archive_path = tempfile.mkstemp()
 
            log.debug('Creating new temp archive in %s', archive_path)
 
            with os.fdopen(fd, 'wb') as stream:
 
                cs.fill_archive(stream=stream, kind=fileformat, subrepos=subrepos)
 
                # stream (and thus fd) has been closed by cs.fill_archive
 
            if cached_archive_path is not None:
 
                # we generated the archive - move it to cache
 
                log.debug('Storing new archive in %s', cached_archive_path)
 
                shutil.move(archive_path, cached_archive_path)
 
                archive_path = cached_archive_path
 

	
 
        def get_chunked_archive(archive_path):
 
            stream = open(archive_path, 'rb')
 
            while True:
 
                data = stream.read(16 * 1024)
 
                if not data:
 
                    break
 
                yield data
 
            stream.close()
 
            if archive_path != cached_archive_path:
 
                log.debug('Destroying temp archive %s', archive_path)
 
                os.remove(archive_path)
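 
        # get_chunked_archive streams the file in 16 kB chunks so large
 
        # archives never need to fit in memory; a temporary (non-cached)
 
        # archive is deleted after the last chunk has been served.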
 

	
 
        action_logger(user=c.authuser,
 
                      action='user_downloaded_archive:%s' % (archive_name),
 
                      repo=repo_name, ipaddr=self.ip_addr, commit=True)
 

	
 
        response.content_disposition = str('attachment; filename=%s' % (archive_name))
 
        response.content_type = str(content_type)
 
        return get_chunked_archive(archive_path)
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def diff(self, repo_name, f_path):
 
        ignore_whitespace = request.GET.get('ignorews') == '1'
 
        line_context = request.GET.get('context', 3)
 
        diff2 = request.GET.get('diff2', '')
 
        diff1 = request.GET.get('diff1', '') or diff2
 
        c.action = request.GET.get('diff')
 
        c.no_changes = diff1 == diff2
 
        c.f_path = f_path
 
        c.big_diff = False
 
        c.anchor_url = anchor_url
 
        c.ignorews_url = _ignorews_url
 
        c.context_url = _context_url
 
        c.changes = OrderedDict()
 
        c.changes[diff2] = []
 

	
 
        # special case: if we only want to show a revision, it is
 
        # implemented here to reduce JS and callbacks
 

	
 
        if request.GET.get('show_rev'):
 
            if str2bool(request.GET.get('annotate', 'False')):
 
                _url = url('files_annotate_home', repo_name=c.repo_name,
 
                           revision=diff1, f_path=c.f_path)
 
            else:
 
                _url = url('files_home', repo_name=c.repo_name,
 
                           revision=diff1, f_path=c.f_path)
 

	
 
            return redirect(_url)
 
        try:
 
            if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]:
 
                c.changeset_1 = c.db_repo_scm_instance.get_changeset(diff1)
 
                try:
 
                    node1 = c.changeset_1.get_node(f_path)
 
                    if node1.is_dir():
 
                        raise NodeError('%s path is a %s not a file'
 
                                        % (node1, type(node1)))
 
                except NodeDoesNotExistError:
 
                    c.changeset_1 = EmptyChangeset(cs=diff1,
 
                                                   revision=c.changeset_1.revision,
 
                                                   repo=c.db_repo_scm_instance)
 
                    node1 = FileNode(f_path, '', changeset=c.changeset_1)
 
            else:
 
                c.changeset_1 = EmptyChangeset(repo=c.db_repo_scm_instance)
 
                node1 = FileNode(f_path, '', changeset=c.changeset_1)
 

	
 
            if diff2 not in ['', None, 'None', '0' * 12, '0' * 40]:
 
                c.changeset_2 = c.db_repo_scm_instance.get_changeset(diff2)
 
                try:
 
                    node2 = c.changeset_2.get_node(f_path)
 
                    if node2.is_dir():
 
                        raise NodeError('%s path is a %s not a file'
 
                                        % (node2, type(node2)))
 
                except NodeDoesNotExistError:
 
                    c.changeset_2 = EmptyChangeset(cs=diff2,
 
                                                   revision=c.changeset_2.revision,
 
                                                   repo=c.db_repo_scm_instance)
 
                    node2 = FileNode(f_path, '', changeset=c.changeset_2)
 
            else:
 
                c.changeset_2 = EmptyChangeset(repo=c.db_repo_scm_instance)
 
                node2 = FileNode(f_path, '', changeset=c.changeset_2)
 
        except (RepositoryError, NodeError):
 
            log.error(traceback.format_exc())
 
            return redirect(url('files_home', repo_name=c.repo_name,
 
                                f_path=f_path))
 

	
 
        if c.action == 'download':
 
            _diff = diffs.get_gitdiff(node1, node2,
 
                                      ignore_whitespace=ignore_whitespace,
 
                                      context=line_context)
 
            diff = diffs.DiffProcessor(_diff, format='gitdiff')
 

	
 
            diff_name = '%s_vs_%s.diff' % (diff1, diff2)
 
            response.content_type = 'text/plain'
 
            response.content_disposition = (
 
                'attachment; filename=%s' % diff_name
 
            )
 
            return diff.as_raw()
 

	
 
        elif c.action == 'raw':
 
            _diff = diffs.get_gitdiff(node1, node2,
 
                                      ignore_whitespace=ignore_whitespace,
 
                                      context=line_context)
 
            diff = diffs.DiffProcessor(_diff, format='gitdiff')
 
            response.content_type = 'text/plain'
 
            return diff.as_raw()
 

	
 
        else:
 
            fid = h.FID(diff2, node2.path)
 
            line_context_lcl = get_line_ctx(fid, request.GET)
 
            ign_whitespace_lcl = get_ignore_ws(fid, request.GET)
 

	
 
            lim = request.GET.get('fulldiff') or self.cut_off_limit
 
            _, cs1, cs2, diff, st = diffs.wrapped_diff(filenode_old=node1,
 
                                         filenode_new=node2,
 
                                         cut_off_limit=lim,
 
                                         ignore_whitespace=ign_whitespace_lcl,
 
                                         line_context=line_context_lcl,
 
                                         enable_comments=False)
 
            op = ''
 
            filename = node1.path
 
            cs_changes = {
 
                'fid': [cs1, cs2, op, filename, diff, st]
 
            }
 
            c.changes = cs_changes
 

	
 
        return render('files/file_diff.html')
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def diff_2way(self, repo_name, f_path):
 
        diff1 = request.GET.get('diff1', '')
 
        diff2 = request.GET.get('diff2', '')
 
        try:
 
            if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]:
 
                c.changeset_1 = c.db_repo_scm_instance.get_changeset(diff1)
 
                try:
 
                    node1 = c.changeset_1.get_node(f_path)
 
                    if node1.is_dir():
 
                        raise NodeError('%s path is a %s not a file'
 
                                        % (node1, type(node1)))
 
                except NodeDoesNotExistError:
 
                    c.changeset_1 = EmptyChangeset(cs=diff1,
 
                                                   revision=c.changeset_1.revision,
 
                                                   repo=c.db_repo_scm_instance)
 
                    node1 = FileNode(f_path, '', changeset=c.changeset_1)
 
            else:
 
                c.changeset_1 = EmptyChangeset(repo=c.db_repo_scm_instance)
 
                node1 = FileNode(f_path, '', changeset=c.changeset_1)
 

	
 
            if diff2 not in ['', None, 'None', '0' * 12, '0' * 40]:
 
                c.changeset_2 = c.db_repo_scm_instance.get_changeset(diff2)
 
                try:
 
                    node2 = c.changeset_2.get_node(f_path)
 
                    if node2.is_dir():
 
                        raise NodeError('%s path is a %s not a file'
 
                                        % (node2, type(node2)))
 
                except NodeDoesNotExistError:
 
                    c.changeset_2 = EmptyChangeset(cs=diff2,
 
                                                   revision=c.changeset_2.revision,
 
                                                   repo=c.db_repo_scm_instance)
 
                    node2 = FileNode(f_path, '', changeset=c.changeset_2)
 
            else:
 
                c.changeset_2 = EmptyChangeset(repo=c.db_repo_scm_instance)
 
                node2 = FileNode(f_path, '', changeset=c.changeset_2)
 
        except ChangesetDoesNotExistError as e:
 
            msg = _('Such revision does not exist for this repository')
 
            h.flash(msg, category='error')
 
            raise HTTPNotFound()
 
        c.node1 = node1
 
        c.node2 = node2
 
        c.cs1 = c.changeset_1
 
        c.cs2 = c.changeset_2
 

	
 
        return render('files/diff_2way.html')
 

	
 
    def _get_node_history(self, cs, f_path, changesets=None):
 
        """
 
        get changesets history for given node
 

	
 
        :param cs: changeset to calculate history
 
        :param f_path: path for node to calculate history for
 
        :param changesets: if passed don't calculate history and take
 
            changesets defined in this list
 
        """
 
        # calculate history based on tip
 
        tip_cs = c.db_repo_scm_instance.get_changeset()
 
        if changesets is None:
 
            try:
 
                changesets = tip_cs.get_file_history(f_path)
 
            except (NodeDoesNotExistError, ChangesetError):
 
                # this node is not present at tip!
 
                changesets = cs.get_file_history(f_path)
 
        hist_l = []
 

	
 
        changesets_group = ([], _("Changesets"))
 
        branches_group = ([], _("Branches"))
 
        tags_group = ([], _("Tags"))
 
        for chs in changesets:
 
            #_branch = '(%s)' % chs.branch if (cs.repository.alias == 'hg') else ''
 
            _branch = chs.branch
 
            n_desc = '%s (%s)' % (h.show_id(chs), _branch)
 
            changesets_group[0].append((chs.raw_id, n_desc,))
 
        hist_l.append(changesets_group)
 

	
 
        for name, chs in c.db_repo_scm_instance.branches.items():
 
            branches_group[0].append((chs, name),)
 
        hist_l.append(branches_group)
 

	
 
        for name, chs in c.db_repo_scm_instance.tags.items():
 
            tags_group[0].append((chs, name),)
 
        hist_l.append(tags_group)
 

	
 
        return hist_l, changesets
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def nodelist(self, repo_name, revision, f_path):
kallithea/lib/auth.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.auth
 
~~~~~~~~~~~~~~~~~~
 

	
 
authentication and permission libraries
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 4, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 
from __future__ import with_statement
 
import time
 
import os
 
import logging
 
import traceback
 
import hashlib
 
import itertools
 
import collections
 

	
 
from decorator import decorator
 

	
 
from pylons import url, request
 
from pylons.controllers.util import abort, redirect
 
from pylons.i18n.translation import _
 
from webhelpers.pylonslib import secure_form
 
from sqlalchemy import or_
 
from sqlalchemy.orm.exc import ObjectDeletedError
 
from sqlalchemy.orm import joinedload
 

	
 
from kallithea import __platform__, is_windows, is_unix
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.model import meta
 
from kallithea.model.meta import Session
 
from kallithea.model.user import UserModel
 
from kallithea.model.db import User, Repository, Permission, \
 
    UserToPerm, UserGroupRepoToPerm, UserGroupToPerm, UserGroupMember, \
 
    RepoGroup, UserGroupRepoGroupToPerm, UserIpMap, UserGroupUserGroupToPerm, \
 
    UserGroup, UserApiKeys
 

	
 
from kallithea.lib.utils2 import safe_unicode, aslist
 
from kallithea.lib.utils import get_repo_slug, get_repo_group_slug, \
 
    get_user_group_slug, conditional_cache
 
from kallithea.lib.caching_query import FromCache
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class PasswordGenerator(object):
 
    """
 
    This is a simple class for generating passwords from different sets of
 
    characters.
 
    usage::
 

	
 
        passwd_gen = PasswordGenerator()
 
        # print an 8-letter password containing only big and small
 
        # letters of the alphabet
 
        passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
 
    """
 
    ALPHABETS_NUM = r'''1234567890'''
 
    ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
 
    ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
 
    ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
 
    ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
 
        + ALPHABETS_NUM + ALPHABETS_SPECIAL
 
    ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
 
    ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
 
    ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
 
    ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM
 

	
 
    def gen_password(self, length, alphabet=ALPHABETS_FULL):
 
        assert len(alphabet) <= 256, alphabet
 
        l = []
 
        while len(l) < length:
 
            i = ord(os.urandom(1))
 
            if i < len(alphabet):
 
                l.append(alphabet[i])
 
        return ''.join(l)
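 
    # The loop above is rejection sampling: bytes >= len(alphabet) are
 
    # discarded, which keeps the choice uniform over the alphabet.
 
    # Usage sketch:
 
    #   passwd = PasswordGenerator().gen_password(
 
    #       12, PasswordGenerator.ALPHABETS_ALPHANUM)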
 

	
 

	
 
class KallitheaCrypto(object):
 

	
 
    @classmethod
 
    def hash_string(cls, str_):
 
        """
 
        Cryptographic function used for password hashing based on pybcrypt
 
        or Python's own OpenSSL-backed hashlib on Windows
 

	
 
        :param str_: password to hash
 
        """
 
        if is_windows:
 
            return hashlib.sha256(str_).hexdigest()
 
        elif is_unix:
 
            import bcrypt
 
            return bcrypt.hashpw(str_, bcrypt.gensalt(10))
 
        else:
 
            raise Exception('Unknown or unsupported platform %s' \
 
                            % __platform__)
 

	
 
    @classmethod
 
    def hash_check(cls, password, hashed):
 
        """
 
        Checks a password against its hashed value, using a different
 
        implementation depending on the platform it runs on
 

	
 
        :param password: password
 
        :param hashed: password in hashed form
 
        """
 

	
 
        if is_windows:
 
            return hashlib.sha256(password).hexdigest() == hashed
 
        elif is_unix:
 
            import bcrypt
 
            return bcrypt.hashpw(password, hashed) == hashed
 
        else:
 
            raise Exception('Unknown or unsupported platform %s' \
 
                            % __platform__)
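 
    # Round-trip sketch (the backend is chosen per platform above):
 
    #   hashed = KallitheaCrypto.hash_string('s3cret')
 
    #   assert KallitheaCrypto.hash_check('s3cret', hashed)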
 

	
 

	
 
def get_crypt_password(password):
 
    return KallitheaCrypto.hash_string(password)
 

	
 

	
 
def check_password(password, hashed):
 
    return KallitheaCrypto.hash_check(password, hashed)
 

	
 

	
 

	
 
def _cached_perms_data(user_id, user_is_admin, user_inherit_default_permissions,
 
                       explicit, algo):
 
    RK = 'repositories'
 
    GK = 'repositories_groups'
 
    UK = 'user_groups'
 
    GLOBAL = 'global'
 
    PERM_WEIGHTS = Permission.PERM_WEIGHTS
 
    permissions = {RK: {}, GK: {}, UK: {}, GLOBAL: set()}
 

	
 
    def _choose_perm(new_perm, cur_perm):
 
        new_perm_val = PERM_WEIGHTS[new_perm]
 
        cur_perm_val = PERM_WEIGHTS[cur_perm]
 
        if algo == 'higherwin':
 
            if new_perm_val > cur_perm_val:
 
                return new_perm
 
            return cur_perm
 
        elif algo == 'lowerwin':
 
            if new_perm_val < cur_perm_val:
 
                return new_perm
 
            return cur_perm
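 
    # Example, assuming the usual weight ordering none < read < write
 
    # < admin: _choose_perm('repository.write', 'repository.read')
 
    # yields 'repository.write' under 'higherwin' and 'repository.read'
 
    # under 'lowerwin'.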
 

	
 
    #======================================================================
 
    # fetch default permissions
 
    #======================================================================
 
    default_user = User.get_by_username('default', cache=True)
 
    default_user_id = default_user.user_id
 

	
 
    default_repo_perms = Permission.get_default_perms(default_user_id)
 
    default_repo_groups_perms = Permission.get_default_group_perms(default_user_id)
 
    default_user_group_perms = Permission.get_default_user_group_perms(default_user_id)
 

	
 
    if user_is_admin:
 
        #==================================================================
 
        # admin users have all rights;
 
        # based on default permissions, just set everything to admin
 
        #==================================================================
 
        permissions[GLOBAL].add('hg.admin')
 
        permissions[GLOBAL].add('hg.create.write_on_repogroup.true')
 

	
 
        # repositories
 
        for perm in default_repo_perms:
 
            r_k = perm.UserRepoToPerm.repository.repo_name
 
            p = 'repository.admin'
 
            permissions[RK][r_k] = p
 

	
 
        # repository groups
 
        for perm in default_repo_groups_perms:
 
            rg_k = perm.UserRepoGroupToPerm.group.group_name
 
            p = 'group.admin'
 
            permissions[GK][rg_k] = p
 

	
 
        # user groups
 
        for perm in default_user_group_perms:
 
            u_k = perm.UserUserGroupToPerm.user_group.users_group_name
 
            p = 'usergroup.admin'
 
            permissions[UK][u_k] = p
 
        return permissions
 

	
 
    #==================================================================
 
    # SET DEFAULTS GLOBAL, REPOS, REPOSITORY GROUPS
 
    #==================================================================
 

	
 
    # default global permissions taken from the default user
 
    default_global_perms = UserToPerm.query()\
 
        .filter(UserToPerm.user_id == default_user_id)\
 
        .options(joinedload(UserToPerm.permission))
 

	
 
    for perm in default_global_perms:
 
        permissions[GLOBAL].add(perm.permission.permission_name)
 

	
 
    # defaults for repositories, taken from default user
 
    for perm in default_repo_perms:
 
        r_k = perm.UserRepoToPerm.repository.repo_name
 
        if perm.Repository.private and not (perm.Repository.user_id == user_id):
 
            # disable defaults for private repos,
 
            p = 'repository.none'
 
        elif perm.Repository.user_id == user_id:
 
            # set admin if owner
 
            p = 'repository.admin'
 
        else:
 
            p = perm.Permission.permission_name
 

	
 
        permissions[RK][r_k] = p
 

	
 
    # defaults for repository groups taken from default user permission
 
    # on given group
 
    for perm in default_repo_groups_perms:
 
        rg_k = perm.UserRepoGroupToPerm.group.group_name
 
        p = perm.Permission.permission_name
 
        permissions[GK][rg_k] = p
 

	
 
    # defaults for user groups taken from default user permission
 
    # on given user group
 
    for perm in default_user_group_perms:
 
        u_k = perm.UserUserGroupToPerm.user_group.users_group_name
 
        p = perm.Permission.permission_name
 
        permissions[UK][u_k] = p
 

	
 
    #======================================================================
 
    # !! OVERRIDE GLOBALS !! with user permissions if any found
 
    #======================================================================
 
    # those can be configured from groups or users explicitly
 
    _configurable = set([
 
        'hg.fork.none', 'hg.fork.repository',
 
        'hg.create.none', 'hg.create.repository',
 
        'hg.usergroup.create.false', 'hg.usergroup.create.true'
 
    ])
 

	
 
    # USER GROUPS comes first
 
    # user group global permissions
 
    user_perms_from_users_groups = Session().query(UserGroupToPerm)\
 
        .options(joinedload(UserGroupToPerm.permission))\
 
        .join((UserGroupMember, UserGroupToPerm.users_group_id ==
 
               UserGroupMember.users_group_id))\
 
        .filter(UserGroupMember.user_id == user_id)\
 
        .join((UserGroup, UserGroupMember.users_group_id ==
 
               UserGroup.users_group_id))\
 
        .filter(UserGroup.users_group_active == True)\
 
        .order_by(UserGroupToPerm.users_group_id)\
 
        .all()
 
    # need to group here by groups since a user can be in more than
 
    # one group
 
    _grouped = [[x, list(y)] for x, y in
 
                itertools.groupby(user_perms_from_users_groups,
 
                                  lambda x:x.users_group)]
 
    for gr, perms in _grouped:
 
        # since a user can be in multiple groups, iterate over them and
 
        # select the lowest permission first (more explicit)
 
        # TODO: do this ^^
 
        if not gr.inherit_default_permissions:
 
            # NEED TO IGNORE all configurable permissions and
 
            # replace them with explicitly set
 
            permissions[GLOBAL] = permissions[GLOBAL]\
 
                                            .difference(_configurable)
 
        for perm in perms:
 
            permissions[GLOBAL].add(perm.permission.permission_name)
 

	
 
    # user specific global permissions
 
    user_perms = Session().query(UserToPerm)\
 
            .options(joinedload(UserToPerm.permission))\
 
            .filter(UserToPerm.user_id == user_id).all()
 

	
 
    if not user_inherit_default_permissions:
 
        # NEED TO IGNORE all configurable permissions and
 
        # replace them with explicitly set
 
        permissions[GLOBAL] = permissions[GLOBAL]\
 
                                        .difference(_configurable)
 

	
 
        for perm in user_perms:
 
            permissions[GLOBAL].add(perm.permission.permission_name)
 
    ## END GLOBAL PERMISSIONS
 

	
 
    #======================================================================
 
    # !! PERMISSIONS FOR REPOSITORIES !!
 
    #======================================================================
 
    #======================================================================
 
    # check if the user is part of user groups for this repository and
 
    # fill in permissions from them. _choose_perm decides which
 
    # permission should be selected, based on the chosen method
 
    #======================================================================
 

	
 
    # user group for repositories permissions
 
    user_repo_perms_from_users_groups = \
 
     Session().query(UserGroupRepoToPerm, Permission, Repository,)\
 
        .join((Repository, UserGroupRepoToPerm.repository_id ==
 
               Repository.repo_id))\
 
        .join((Permission, UserGroupRepoToPerm.permission_id ==
 
               Permission.permission_id))\
 
        .join((UserGroup, UserGroupRepoToPerm.users_group_id ==
 
               UserGroup.users_group_id))\
 
        .filter(UserGroup.users_group_active == True)\
 
        .join((UserGroupMember, UserGroupRepoToPerm.users_group_id ==
 
               UserGroupMember.users_group_id))\
 
        .filter(UserGroupMember.user_id == user_id)\
 
        .all()
 

	
 
    multiple_counter = collections.defaultdict(int)
 
    for perm in user_repo_perms_from_users_groups:
 
        r_k = perm.UserGroupRepoToPerm.repository.repo_name
 
        multiple_counter[r_k] += 1
 
        p = perm.Permission.permission_name
 
        cur_perm = permissions[RK][r_k]
 

	
 
        if perm.Repository.user_id == user_id:
 
            # set admin if owner
 
            p = 'repository.admin'
 
        else:
 
            if multiple_counter[r_k] > 1:
 
                p = _choose_perm(p, cur_perm)
 
        permissions[RK][r_k] = p
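 
    # Illustrative outcome: a user in two groups granting
 
    # 'repository.read' and 'repository.write' on the same repository
 
    # ends up with 'repository.write' under the default 'higherwin'
 
    # algorithm (unless ownership already forced 'repository.admin').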
 

	
 
    # user explicit permissions for repositories, overrides any specified
 
    # by the group permission
 
    user_repo_perms = Permission.get_default_perms(user_id)
 
    for perm in user_repo_perms:
 
        r_k = perm.UserRepoToPerm.repository.repo_name
 
        cur_perm = permissions[RK][r_k]
 
        # set admin if owner
 
        if perm.Repository.user_id == user_id:
 
            p = 'repository.admin'
 
        else:
 
            p = perm.Permission.permission_name
 
            if not explicit:
 
                p = _choose_perm(p, cur_perm)
 
        permissions[RK][r_k] = p
 

	
 
    #======================================================================
 
    # !! PERMISSIONS FOR REPOSITORY GROUPS !!
 
    #======================================================================
 
    #======================================================================
 
    # check if the user is part of user groups for these repository groups and
 
    # fill in permissions from them. _choose_perm decides which
 
    # permission should be selected, based on the chosen method
 
    #======================================================================
 
    # user group for repo groups permissions
 
    user_repo_group_perms_from_users_groups = \
 
     Session().query(UserGroupRepoGroupToPerm, Permission, RepoGroup)\
 
     .join((RepoGroup, UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id))\
 
     .join((Permission, UserGroupRepoGroupToPerm.permission_id
 
            == Permission.permission_id))\
 
     .join((UserGroup, UserGroupRepoGroupToPerm.users_group_id ==
 
            UserGroup.users_group_id))\
 
     .filter(UserGroup.users_group_active == True)\
 
     .join((UserGroupMember, UserGroupRepoGroupToPerm.users_group_id
 
            == UserGroupMember.users_group_id))\
 
     .filter(UserGroupMember.user_id == user_id)\
 
     .all()
 

	
 
    multiple_counter = collections.defaultdict(int)
 
    for perm in user_repo_group_perms_from_users_groups:
 
        g_k = perm.UserGroupRepoGroupToPerm.group.group_name
 
        multiple_counter[g_k] += 1
 
        p = perm.Permission.permission_name
 
        cur_perm = permissions[GK][g_k]
 
        if multiple_counter[g_k] > 1:
 
            p = _choose_perm(p, cur_perm)
 
        permissions[GK][g_k] = p
 

	
 
    # user explicit permissions for repository groups
 
    user_repo_groups_perms = Permission.get_default_group_perms(user_id)
 
    for perm in user_repo_groups_perms:
 
        rg_k = perm.UserRepoGroupToPerm.group.group_name
 
        p = perm.Permission.permission_name
 
        cur_perm = permissions[GK][rg_k]
 
        if not explicit:
 
            p = _choose_perm(p, cur_perm)
 
        permissions[GK][rg_k] = p
 

	
 
    #======================================================================
 
    # !! PERMISSIONS FOR USER GROUPS !!
 
    #======================================================================
 
    # user group for user group permissions
 
    user_group_user_groups_perms = \
 
     Session().query(UserGroupUserGroupToPerm, Permission, UserGroup)\
 
     .join((UserGroup, UserGroupUserGroupToPerm.target_user_group_id
 
            == UserGroup.users_group_id))\
 
     .join((Permission, UserGroupUserGroupToPerm.permission_id
 
            == Permission.permission_id))\
 
     .join((UserGroupMember, UserGroupUserGroupToPerm.user_group_id
 
            == UserGroupMember.users_group_id))\
 
     .filter(UserGroupMember.user_id == user_id)\
 
     .join((UserGroup, UserGroupMember.users_group_id ==
 
            UserGroup.users_group_id), aliased=True, from_joinpoint=True)\
 
     .filter(UserGroup.users_group_active == True)\
 
     .all()
 

	
 
    multiple_counter = collections.defaultdict(int)
 
    for perm in user_group_user_groups_perms:
 
        g_k = perm.UserGroupUserGroupToPerm.target_user_group.users_group_name
 
        multiple_counter[g_k] += 1
 
        p = perm.Permission.permission_name
 
        cur_perm = permissions[UK][g_k]
 
        if multiple_counter[g_k] > 1:
 
            p = _choose_perm(p, cur_perm)
 
        permissions[UK][g_k] = p
 

	
 
    # user explicit permission for user groups
 
    user_user_groups_perms = Permission.get_default_user_group_perms(user_id)
 
    for perm in user_user_groups_perms:
 
        u_k = perm.UserUserGroupToPerm.user_group.users_group_name
 
        p = perm.Permission.permission_name
 
        cur_perm = permissions[UK][u_k]
 
        if not explicit:
 
            p = _choose_perm(p, cur_perm)
 
        permissions[UK][u_k] = p
 

	
 
    return permissions
 

	
 

	
 
def allowed_api_access(controller_name, whitelist=None, api_key=None):
 
    """
 
    Check if the given controller_name is in the API access whitelist
 
    """
 
    if not whitelist:
 
        from kallithea import CONFIG
 
        whitelist = aslist(CONFIG.get('api_access_controllers_whitelist'),
 
                           sep=',')
 
        log.debug('whitelist of API access is: %s', whitelist)
 
    api_access_valid = controller_name in whitelist
 
    if api_access_valid:
 
        log.debug('controller:%s is in API whitelist', controller_name)
 
    else:
 
        msg = 'controller: %s is *NOT* in API whitelist' % (controller_name)
 
        if api_key:
 
            # if an API key was used but access is denied, log a warning
 
            log.warning(msg)
 
        else:
 
            log.debug(msg)
 
    return api_access_valid
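 
# Usage sketch; the whitelist is read from the .ini setting
 
# 'api_access_controllers_whitelist' and the controller name below is
 
# illustrative:
 
#   if allowed_api_access('ChangesetController', api_key=key):
 
#       pass  # serve the raw view without a logged-in session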
 

	
 

	
 
class AuthUser(object):
 
    """
 
    Represents a Kallithea user, including various authentication and
 
    authorization information. Typically used to store the current user,
 
    but is also used as a generic user information data structure in
 
    parts of the code, e.g. user management.
 

	
 
    Constructed from a database `User` object, a user ID or cookie dict,
 
    it looks up the user (if needed) and copies all attributes to itself,
 
    adding various non-persistent data. If lookup fails but anonymous
 
    access to Kallithea is enabled, the default user is loaded instead.
 

	
 
    `AuthUser` does not by itself authenticate users and the constructor
 
    sets the `is_authenticated` field to False, except when falling back
 
    to the default anonymous user (if enabled). It's up to other parts
 
    of the code to check e.g. if a supplied password is correct, and if
 
    so, set `is_authenticated` to True.
 

	
 
    However, `AuthUser` does refuse to load a user that is not `active`.
 
    """
 

	
 
    def __init__(self, user_id=None, dbuser=None,
 
            is_external_auth=False):
 

	
 
        self.is_authenticated = False
 
        self.is_external_auth = is_external_auth
 

	
 
        user_model = UserModel()
 
        self.anonymous_user = User.get_default_user(cache=True)
 

	
 
        # These attributes will be overridden by _fill_data, below, unless the
 
        # requested user cannot be found and the default anonymous user is
 
        # not enabled.
 
        self.user_id = None
 
        self.username = None
 
        self.api_key = None
 
        self.name = ''
 
        self.lastname = ''
 
        self.email = ''
 
        self.admin = False
 
        self.inherit_default_permissions = False
 

	
 
        # Look up database user, if necessary.
 
        if user_id is not None:
 
            log.debug('Auth User lookup by USER ID %s', user_id)
 
            dbuser = user_model.get(user_id)
 
        else:
 
            # Note: dbuser is allowed to be None.
 
            log.debug('Auth User lookup by database user %s', dbuser)
 

	
 
        is_user_loaded = self._fill_data(dbuser)
 

	
 
        # If user cannot be found, try falling back to anonymous.
 
        if not is_user_loaded:
 
            is_user_loaded = self._fill_data(self.anonymous_user)
 

	
 
        # The anonymous user is always "logged in".
 
        if self.user_id == self.anonymous_user.user_id:
 
            self.is_authenticated = True
 

	
 
        if not self.username:
 
            self.username = 'None'
 

	
 
        log.debug('Auth User is now %s', self)
 

	
 
    def _fill_data(self, dbuser):
 
        """
 
        Copies database fields from a `db.User` to this `AuthUser`. Does
 
        not copy `api_keys` and `permissions` attributes.
 

	
 
        Checks that `dbuser` is `active` (and not None) before copying;
 
        returns True on success.
 
        """
 
        if dbuser is not None and dbuser.active:
 
            log.debug('filling %s data', dbuser)
 
            for k, v in dbuser.get_dict().iteritems():
 
                assert k not in ['api_keys', 'permissions']
 
                setattr(self, k, v)
 
            return True
 
        return False
 

	
 
    @LazyProperty
 
    def permissions(self):
 
        return self.__get_perms(user=self, cache=False)
 

	
 
    @property
 
    def api_keys(self):
 
        return self._get_api_keys()
 

	
 
    def __get_perms(self, user, explicit=True, algo='higherwin', cache=False):
 
        """
 
        Fills the user permission attribute with permissions taken from the
 
        database. Works for permissions given directly on repositories, and
 
        for permissions granted via groups.
 

	
 
        :param user: `AuthUser` instance
 
        :param explicit: when permissions are defined both for the user and
 
            for a group the user is a member of, this flag decides whether the
 
            user's explicit permission overrides the group permission; if it
 
            is False, the decision is made by the algorithm given in `algo`
 
        :param algo: algorithm deciding which permission to choose when
 
            multiple are defined, e.g. when the user is a member of two
 
            different groups; with `explicit` off it also decides the case
 
            where the user is in a group and additionally has a separate
 
            permission defined
 
        """
 
        user_id = user.user_id
 
        user_is_admin = user.is_admin
 
        user_inherit_default_permissions = user.inherit_default_permissions
 

	
 
        log.debug('Getting PERMISSION tree')
 
        compute = conditional_cache('short_term', 'cache_desc',
 
                                    condition=cache, func=_cached_perms_data)
 
        return compute(user_id, user_is_admin,
 
                       user_inherit_default_permissions, explicit, algo)
 

	
 
    def _get_api_keys(self):
 
        api_keys = [self.api_key]
 
        for api_key in UserApiKeys.query()\
 
                .filter(UserApiKeys.user_id == self.user_id)\
 
                .filter(or_(UserApiKeys.expires == -1,
 
                            UserApiKeys.expires >= time.time())).all():
 
            api_keys.append(api_key.api_key)
 

	
 
        return api_keys
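
# Illustrative sketch (standalone): the expiry semantics used by the
# _get_api_keys() query above - UserApiKeys.expires == -1 means the key
# never expires, otherwise it is valid until the given epoch timestamp.
import time

def _api_key_is_valid(expires):
    return expires == -1 or expires >= time.time()

assert _api_key_is_valid(-1)
assert not _api_key_is_valid(time.time() - 60)
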
 

	
 
    @property
 
    def is_admin(self):
 
        return self.admin
 

	
 
    @property
 
    def repositories_admin(self):
 
        """
 
        Returns list of repositories you're an admin of
 
        """
 
        return [x[0] for x in self.permissions['repositories'].iteritems()
 
                if x[1] == 'repository.admin']
 

	
 
    @property
 
    def repository_groups_admin(self):
 
        """
 
        Returns list of repository groups you're an admin of
 
        """
 
        return [x[0] for x in self.permissions['repositories_groups'].iteritems()
 
                if x[1] == 'group.admin']
 

	
 
    @property
 
    def user_groups_admin(self):
 
        """
 
        Returns list of user groups you're an admin of
 
        """
 
        return [x[0] for x in self.permissions['user_groups'].iteritems()
 
                if x[1] == 'usergroup.admin']
 

	
 
    @staticmethod
 
    def check_ip_allowed(user, ip_addr):
 
        """
 
        Check if the given IP address (a `str`) is allowed for the given
 
        user (an `AuthUser` or `db.User`).
 
        """
 
        allowed_ips = AuthUser.get_allowed_ips(user.user_id, cache=True,
 
            inherit_from_default=user.inherit_default_permissions)
 
        if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
 
            log.debug('IP:%s is in range of %s', ip_addr, allowed_ips)
 
            return True
 
        else:
 
            log.info('Access for IP:%s forbidden, not in %s',
 
                     ip_addr, allowed_ips)
 
            return False
 

	
 
    def __repr__(self):
 
        return "<AuthUser('id:%s[%s] auth:%s')>"\
 
            % (self.user_id, self.username, self.is_authenticated)
 

	
 
    def set_authenticated(self, authenticated=True):
 
        if self.user_id != self.anonymous_user.user_id:
 
            self.is_authenticated = authenticated
 

	
 
    def to_cookie(self):
 
        """ Serializes this login session to a cookie `dict`. """
 
        return {
 
            'user_id': self.user_id,
 
            'is_authenticated': self.is_authenticated,
 
            'is_external_auth': self.is_external_auth,
 
        }
 

	
 
    @staticmethod
 
    def from_cookie(cookie):
 
        """
 
        Deserializes an `AuthUser` from a cookie `dict`.
 
        """
 

	
 
        au = AuthUser(
 
            user_id=cookie.get('user_id'),
 
            is_external_auth=cookie.get('is_external_auth', False),
 
        )
 
        if not au.is_authenticated and au.user_id is not None:
 
            # a real (non-anonymous) user was loaded but not auto-authenticated;
            # restore the authentication flag from the cookie
 
            au.set_authenticated(cookie.get('is_authenticated'))
 
        return au
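
# Illustrative sketch (standalone): the cookie dict shape produced by
# to_cookie() and consumed by from_cookie(); the values are hypothetical.
# from_cookie() re-creates the AuthUser from 'user_id', then restores the
# authentication flag via set_authenticated() - which is a no-op for the
# anonymous user.
_cookie = {
    'user_id': 42,
    'is_authenticated': True,
    'is_external_auth': False,
}
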
 

	
 
    @classmethod
 
    def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
 
        _set = set()
 

	
 
        if inherit_from_default:
 
            default_ips = UserIpMap.query().filter(UserIpMap.user ==
 
                                            User.get_default_user(cache=True))
 
            if cache:
 
                default_ips = default_ips.options(FromCache("sql_cache_short",
 
                                                  "get_user_ips_default"))
 

	
 
            # populate from default user
 
            for ip in default_ips:
 
                try:
 
                    _set.add(ip.ip_addr)
 
                except ObjectDeletedError:
 
                    # since we use heavy caching sometimes it happens that we get
 
                    # deleted objects here, we just skip them
 
                    pass
 

	
 
        user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
 
        if cache:
 
            user_ips = user_ips.options(FromCache("sql_cache_short",
 
                                                  "get_user_ips_%s" % user_id))
 

	
 
        for ip in user_ips:
 
            try:
 
                _set.add(ip.ip_addr)
 
            except ObjectDeletedError:
 
                # since we use heavy caching sometimes it happens that we get
 
                # deleted objects here, we just skip them
 
                pass
 
        return _set or set(['0.0.0.0/0', '::/0'])
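
# Illustrative sketch (standalone): the inheritance and fall-back behaviour
# of get_allowed_ips() in plain form - with no explicit entries at all,
# everything is allowed.
def _effective_allowed_ips(user_ips, default_ips, inherit_from_default):
    allowed = set(default_ips) if inherit_from_default else set()
    allowed.update(user_ips)
    return allowed or set(['0.0.0.0/0', '::/0'])

assert _effective_allowed_ips([], [], True) == set(['0.0.0.0/0', '::/0'])
assert _effective_allowed_ips(['10.0.0.1'], ['192.168.0.0/24'], True) == \
    set(['10.0.0.1', '192.168.0.0/24'])
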
 

	
 

	
 
def set_available_permissions(config):
 
    """
 
    This function populates the pylons globals with all permissions defined
 
    in the database. We don't want to query the database for permissions on
 
    every request, since adding a new permission also requires an application
 
    restart anyway, i.e. to decorate new views with the newly created
 
    permission.
 

	
 
    :param config: current pylons config instance
 

	
 
    """
 
    log.info('getting information about all available permissions')
 
    try:
 
        sa = meta.Session
 
        all_perms = sa.query(Permission).all()
 
        config['available_permissions'] = [x.permission_name for x in all_perms]
 
    finally:
 
        meta.Session.remove()
 

	
 

	
 
#==============================================================================
 
# CHECK DECORATORS
 
#==============================================================================
 

	
 
def redirect_to_login(message=None):
 
    from kallithea.lib import helpers as h
 
    p = url.current()
 
    if message:
 
        h.flash(h.literal(message), category='warning')
 
    log.debug('Redirecting to login page, origin: %s', p)
 
    return redirect(url('login_home', came_from=p, **request.GET))
 

	
 
class LoginRequired(object):
 
    """
 
    Must be logged in to execute this function else
 
    redirect to login page
 

	
 
    :param api_access: if enabled this checks only for valid auth token
 
        and grants access based on valid token
 
    """
 

	
 
    def __init__(self, api_access=False):
 
        self.api_access = api_access
 

	
 
    def __call__(self, func):
 
        return decorator(self.__wrapper, func)
 

	
 
    def __wrapper(self, func, *fargs, **fkwargs):
 
        controller = fargs[0]
 
        user = controller.authuser
 
        loc = "%s:%s" % (controller.__class__.__name__, func.__name__)
 
        log.debug('Checking access for user %s @ %s', user, loc)
 

	
 
        if not AuthUser.check_ip_allowed(user, controller.ip_addr):
 
            return redirect_to_login(_('IP %s not allowed') % controller.ip_addr)
 

	
 
        # check if we used an API key and it's a valid one
 
        api_key = request.GET.get('api_key')
 
        if api_key is not None:
 
            # explicit controller is enabled or API is in our whitelist
 
            if self.api_access or allowed_api_access(loc, api_key=api_key):
 
                if api_key in user.api_keys:
 
                    log.info('user %s authenticated with API key ****%s @ %s',
 
                             user, api_key[-4:], loc)
 
                    return func(*fargs, **fkwargs)
 
                else:
 
                    log.warning('API key ****%s is NOT valid', api_key[-4:])
 
                    return redirect_to_login(_('Invalid API key'))
 
            else:
 
                # controller does not allow API access
 
                log.warning('API access to %s is not allowed', loc)
 
                return abort(403)
 

	
 
        # Only allow the following HTTP request methods. (We sometimes use POST
 
        # requests with a '_method' set to 'PUT' or 'DELETE'; but that is only
 
        # used for the route lookup, and does not affect request.method.)
 
        if request.method not in ['GET', 'HEAD', 'POST', 'PUT']:
 
            return abort(405)
 

	
 
        # CSRF protection: Whenever a request has ambient authority (whether
 
        # through a session cookie or its origin IP address), it must include
 
        # the correct token, unless the HTTP method is GET or HEAD (and thus
 
        # guaranteed to be side effect free). In practice, the only situation
 
        # where we allow side effects without ambient authority is when the
 
        # authority comes from an API key; and that is handled above.
 
        if request.method not in ['GET', 'HEAD']:
 
            token = request.POST.get(secure_form.token_key)
 
            if not token or token != secure_form.authentication_token():
 
                log.error('CSRF check failed')
 
                return abort(403)
 

	
 
        # WebOb already ignores request payload parameters for anything other
 
        # than POST/PUT, but double-check since other Kallithea code relies on
 
        # this assumption.
 
        if request.method not in ['POST', 'PUT'] and request.POST:
 
            log.error('%r request with payload parameters; WebOb should have stopped this', request.method)
 
            return abort(400)
 

	
 
        # regular user authentication
 
        if user.is_authenticated:
 
            log.info('user %s authenticated with regular auth @ %s', user, loc)
 
            return func(*fargs, **fkwargs)
 
        else:
 
            log.warning('user %s NOT authenticated with regular auth @ %s', user, loc)
 
            return redirect_to_login()
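
# Illustrative sketch: typical use of the decorator on a controller action;
# the controller and action names are hypothetical. With api_access=True,
# authentication via the ?api_key=... request parameter checked above is
# also accepted for this action.
#
#     class ChangesetController(BaseController):
#
#         @LoginRequired(api_access=True)
#         def changeset_raw(self, revision):
#             ...
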
 

	
kallithea/lib/indexers/daemon.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.indexers.daemon
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
A daemon that builds and incrementally updates the Whoosh full text search indexes
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jan 26, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 

	
 
import os
 
import sys
 
import logging
 
import traceback
 

	
 
from shutil import rmtree
 
from time import mktime
 

	
 
from os.path import dirname as dn
 
from os.path import join as jn
 

	
 
# Add location of top level folder to sys.path
 
project_path = dn(dn(dn(dn(os.path.realpath(__file__)))))
 
sys.path.append(project_path)
 

	
 
from kallithea.config.conf import INDEX_EXTENSIONS
 
from kallithea.model.scm import ScmModel
 
from kallithea.model.db import Repository
 
from kallithea.lib.utils2 import safe_unicode, safe_str
 
from kallithea.lib.indexers import SCHEMA, IDX_NAME, CHGSETS_SCHEMA, \
 
    CHGSET_IDX_NAME
 

	
 
from kallithea.lib.vcs.exceptions import ChangesetError, RepositoryError, \
 
    NodeDoesNotExistError
 

	
 
from whoosh.index import create_in, open_dir, exists_in
 
from whoosh.query import *
 
from whoosh.qparser import QueryParser
 

	
 
log = logging.getLogger('whoosh_indexer')
 

	
 

	
 
class WhooshIndexingDaemon(object):
 
    """
 
    Daemon for atomic indexing jobs
 
    """
 

	
 
    def __init__(self, indexname=IDX_NAME, index_location=None,
 
                 repo_location=None, sa=None, repo_list=None,
 
                 repo_update_list=None):
 
        self.indexname = indexname
 

	
 
        self.index_location = index_location
 
        if not index_location:
 
            raise Exception('You have to provide index location')
 

	
 
        self.repo_location = repo_location
 
        if not repo_location:
 
            raise Exception('You have to provide repositories location')
 

	
 
        self.repo_paths = ScmModel(sa).repo_scan(self.repo_location)
 

	
 
        #filter repo list
 
        if repo_list:
 
            # decode non-ascii repo names to unicode
 
            repo_list = map(safe_unicode, repo_list)
 
            self.filtered_repo_paths = {}
 
            for repo_name, repo in self.repo_paths.items():
 
                if repo_name in repo_list:
 
                    self.filtered_repo_paths[repo_name] = repo
 

	
 
            self.repo_paths = self.filtered_repo_paths
 

	
 
        #filter update repo list
 
        self.filtered_repo_update_paths = {}
 
        if repo_update_list:
 
            self.filtered_repo_update_paths = {}
 
            for repo_name, repo in self.repo_paths.items():
 
                if repo_name in repo_update_list:
 
                    self.filtered_repo_update_paths[repo_name] = repo
 
            self.repo_paths = self.filtered_repo_update_paths
 

	
 
        self.initial = True
 
        if not os.path.isdir(self.index_location):
 
            os.makedirs(self.index_location)
 
            log.info('Cannot run incremental index since it does not '
 
                     'yet exist - running full index build')
 
        elif not exists_in(self.index_location, IDX_NAME):
 
            log.info('Running full index build as the file content '
 
                     'index does not exist')
 
        elif not exists_in(self.index_location, CHGSET_IDX_NAME):
 
            log.info('Running full index build as the changeset '
 
                     'index does not exist')
 
        else:
 
            self.initial = False
 

	
 
    def _get_index_revision(self, repo):
 
        db_repo = Repository.get_by_repo_name(repo.name_unicode)
 
        landing_rev = 'tip'
 
        if db_repo:
 
            _rev_type, _rev = db_repo.landing_rev
 
            landing_rev = _rev
 
        return landing_rev
 

	
 
    def _get_index_changeset(self, repo, index_rev=None):
 
        if not index_rev:
 
            index_rev = self._get_index_revision(repo)
 
        cs = repo.get_changeset(index_rev)
 
        return cs
 

	
 
    def get_paths(self, repo):
 
        """
 
        Recursively walk the repository root and return the set of all file
 
        paths in it, based on the repository walk function.
 
        """
 
        index_paths_ = set()
 
        try:
 
            cs = self._get_index_changeset(repo)
 
            for _topnode, _dirs, files in cs.walk('/'):
 
                for f in files:
 
                    index_paths_.add(jn(safe_str(repo.path), safe_str(f.path)))
 

	
 
        except RepositoryError:
 
            log.debug(traceback.format_exc())
 
 
        return index_paths_
 

	
 
    def get_node(self, repo, path, index_rev=None):
 
        """
 
        Gets a FileNode based on the given full path. It operates on byte
 
        strings for hg/git compatibility.
 

	
 
        :param repo: scm repo instance
 
        :param path: full path including root location
 
        :return: FileNode
 
        """
 
        # FIXME: paths should be normalized ... or even better: don't include repo.path
 
        path = safe_str(path)
 
        repo_path = safe_str(repo.path)
 
        assert path.startswith(repo_path)
 
        assert path[len(repo_path)] in (os.path.sep, os.path.altsep)
 
        node_path = path[len(repo_path) + 1:]
 
        cs = self._get_index_changeset(repo, index_rev=index_rev)
 
        node = cs.get_node(node_path)
 
        return node
 

	
 
    def get_node_mtime(self, node):
 
        return mktime(node.last_changeset.date.timetuple())
 

	
 
    def add_doc(self, writer, path, repo, repo_name, index_rev=None):
 
        """
 
        Add a document to the writer; this function itself fetches the data
 
        from the vcs backend instance.
 
        """
 

	
 
        node = self.get_node(repo, path, index_rev)
 
        indexed = indexed_w_content = 0
 
        # we just index the content of chosen files, and skip binary files
 
        if node.extension in INDEX_EXTENSIONS and not node.is_binary:
 
            u_content = node.content
 
            if not isinstance(u_content, unicode):
 
                log.warning('  >> %s could not be read as unicode, '
 
                            'replacing with empty content', path)
 
                u_content = u''
 
            else:
 
                log.debug('    >> %s [WITH CONTENT]', path)
 
                indexed_w_content += 1
 

	
 
        else:
 
            log.debug('    >> %s', path)
 
            # just index the file name, without its content
 
            u_content = u''
 
            indexed += 1
 

	
 
        p = safe_unicode(path)
 
        writer.add_document(
 
            fileid=p,
 
            owner=unicode(repo.contact),
 
            repository=safe_unicode(repo_name),
 
            path=p,
 
            content=u_content,
 
            modtime=self.get_node_mtime(node),
 
            extension=node.extension
 
        )
 
        return indexed, indexed_w_content
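
# Illustrative sketch (standalone): the content-indexing decision above in
# plain form - content is only indexed for whitelisted extensions on
# non-binary nodes; other files get just their name indexed. The extension
# list below is a hypothetical stand-in for INDEX_EXTENSIONS.
def _should_index_content(extension, is_binary, index_extensions=('py', 'rst')):
    return extension in index_extensions and not is_binary

assert _should_index_content('py', False)
assert not _should_index_content('py', True)    # binary file
assert not _should_index_content('exe', False)  # extension not whitelisted
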
 

	
 
    def index_changesets(self, writer, repo_name, repo, start_rev=None):
 
        """
 
        Add all changesets in the vcs repo starting at start_rev
 
        to the index writer
 

	
 
        :param writer: the whoosh index writer to add to
 
        :param repo_name: name of the repository the changesets originate
 
          from, including the repository group
 
        :param repo: the vcs repository instance to index changesets for,
 
          the presumption is the repo has changesets to index
 
        :param start_rev=None: the full sha id to start indexing from
 
          if start_rev is None then index from the first changeset in
 
          the repo
 
        """
 

	
 
        if start_rev is None:
 
            start_rev = repo[0].raw_id
 

	
 
        log.debug('indexing changesets in %s starting at rev: %s',
 
                  repo_name, start_rev)
 

	
 
        indexed = 0
 
        cs_iter = repo.get_changesets(start=start_rev)
 
        total = len(cs_iter)
 
        for cs in cs_iter:
 
            log.debug('    >> %s/%s', cs, total)
 
            writer.add_document(
 
                raw_id=unicode(cs.raw_id),
 
                owner=unicode(repo.contact),
 
                date=cs._timestamp,
 
                repository=safe_unicode(repo_name),
 
                author=cs.author,
 
                message=cs.message,
 
                last=cs.last,
 
                added=u' '.join([safe_unicode(node.path) for node in cs.added]).lower(),
 
                removed=u' '.join([safe_unicode(node.path) for node in cs.removed]).lower(),
 
                changed=u' '.join([safe_unicode(node.path) for node in cs.changed]).lower(),
 
                # use a distinct loop variable so the outer 'cs' is not shadowed
 
                parents=u' '.join([parent.raw_id for parent in cs.parents]),
 
            )
 
            indexed += 1
 

	
 
        log.debug('indexed %d changesets for repo %s', indexed, repo_name)
 
        return indexed
 

	
 
    def index_files(self, file_idx_writer, repo_name, repo):
 
        """
 
        Index files for given repo_name
 

	
 
        :param file_idx_writer: the whoosh index writer to add to
 
        :param repo_name: name of the repository we're indexing
 
        :param repo: instance of vcs repo
 
        """
 
        i_cnt = iwc_cnt = 0
 
        log.debug('building index for %s @revision:%s', repo.path,
 
                                                self._get_index_revision(repo))
 
        index_rev = self._get_index_revision(repo)
 
        for idx_path in self.get_paths(repo):
 
            i, iwc = self.add_doc(file_idx_writer, idx_path, repo, repo_name, index_rev)
 
            i_cnt += i
 
            iwc_cnt += iwc
 

	
 
        log.debug('added %s files %s with content for repo %s',
 
                  i_cnt + iwc_cnt, iwc_cnt, repo.path)
 
        return i_cnt, iwc_cnt
 

	
 
    def update_changeset_index(self):
 
        idx = open_dir(self.index_location, indexname=CHGSET_IDX_NAME)
 

	
 
        with idx.searcher() as searcher:
 
            writer = idx.writer()
 
            writer_is_dirty = False
 
            try:
 
                indexed_total = 0
 
                repo_name = None
 
                for repo_name, repo in self.repo_paths.items():
 
                    # skip indexing if there aren't any revs in the repo
 
                    num_of_revs = len(repo)
 
                    if num_of_revs < 1:
 
                        continue
 

	
 
                    qp = QueryParser('repository', schema=CHGSETS_SCHEMA)
 
                    q = qp.parse(u"last:t AND %s" % repo_name)
 

	
 
                    results = searcher.search(q)
 

	
 
                    # default to scanning the entire repo
 
                    last_rev = 0
 
                    start_id = None
 

	
 
                    if len(results) > 0:
 
                        # assuming that there is only one result, if not this
 
                        # may require a full re-index.
 
                        start_id = results[0]['raw_id']
 
                        last_rev = repo.get_changeset(revision=start_id).revision
 

	
 
                    # there are new changesets to index or a new repo to index
 
                    if last_rev == 0 or num_of_revs > last_rev + 1:
 
                        # delete the docs in the index for the previous
 
                        # last changeset(s)
 
                        for hit in results:
 
                            q = qp.parse(u"last:t AND %s AND raw_id:%s" %
 
                                            (repo_name, hit['raw_id']))
 
                            writer.delete_by_query(q)
 

	
 
                        # index from the previous last changeset + all new ones
 
                        indexed_total += self.index_changesets(writer,
 
                                                repo_name, repo, start_id)
 
                        writer_is_dirty = True
 
                log.debug('indexed %s changesets for repo %s',
 
                          indexed_total, repo_name
 
                )
 
            finally:
 
                if writer_is_dirty:
 
                    log.debug('>> COMMITTING CHANGES TO CHANGESET INDEX <<')
 
                    writer.commit(merge=True)
 
                    log.debug('>>> FINISHED REBUILDING CHANGESET INDEX <<<')
 
                else:
 
                    log.debug('>> NOTHING TO COMMIT TO CHANGESET INDEX <<')
 
                    # release the writer lock, as update_file_index() does below
 
                    writer.cancel()
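
# Illustrative sketch (standalone): the reindex-or-skip decision above in
# plain form - reindex when the repo was never indexed (no 'last' hit, so
# last_rev stays 0) or when changesets were added after the recorded one.
def _needs_changeset_reindex(num_of_revs, last_rev):
    return last_rev == 0 or num_of_revs > last_rev + 1

assert _needs_changeset_reindex(10, 0)      # never indexed: full scan
assert _needs_changeset_reindex(12, 9)      # new changesets appeared
assert not _needs_changeset_reindex(10, 9)  # index is up to date
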
 

	
 
    def update_file_index(self):
 
        log.debug(u'STARTING INCREMENTAL INDEXING UPDATE FOR EXTENSIONS %s '
 
                  u'AND REPOS %s', INDEX_EXTENSIONS, self.repo_paths.keys())
 

	
 
        idx = open_dir(self.index_location, indexname=self.indexname)
 
        # The set of all paths in the index
 
        indexed_paths = set()
 
        # The set of all paths we need to re-index
 
        to_index = set()
 

	
 
        writer = idx.writer()
 
        writer_is_dirty = False
 
        try:
 
            with idx.reader() as reader:
 

	
 
                # Loop over the stored fields in the index
 
                for fields in reader.all_stored_fields():
 
                    indexed_path = fields['path']
 
                    indexed_repo_path = fields['repository']
 
                    indexed_paths.add(indexed_path)
 

	
 
                    if indexed_repo_path not in self.filtered_repo_update_paths:
 
                        continue
 

	
 
                    repo = self.repo_paths[indexed_repo_path]
 

	
 
                    try:
 
                        node = self.get_node(repo, indexed_path)
 
                        # Check if this file was changed since it was indexed
 
                        indexed_time = fields['modtime']
 
                        mtime = self.get_node_mtime(node)
 
                        if mtime > indexed_time:
 
                            # The file has changed, delete it and add it to
 
                            # the list of files to reindex
 
                            log.debug(
 
                                'adding to reindex list %s mtime: %s vs %s',
 
                                    indexed_path, mtime, indexed_time
 
                            )
 
                            writer.delete_by_term('fileid', indexed_path)
 
                            writer_is_dirty = True
 

	
 
                            to_index.add(indexed_path)
 
                    except (ChangesetError, NodeDoesNotExistError):
 
                        # This file was deleted since it was indexed
 
                        log.debug('removing from index %s', indexed_path)
 
                        writer.delete_by_term('path', indexed_path)
 
                        writer_is_dirty = True
 

	
 
            # Loop over the files in the filesystem
 
            # Assume we have a function that gathers the filenames of the
 
            # documents to be indexed
 
            ri_cnt_total = 0  # indexed
 
            riwc_cnt_total = 0  # indexed with content
 
            for repo_name, repo in self.repo_paths.items():
 
                # skip indexing if there aren't any revisions
 
                if len(repo) < 1:
 
                    continue
 
                ri_cnt = 0   # indexed
 
                riwc_cnt = 0  # indexed with content
 
                for path in self.get_paths(repo):
 
                    path = safe_unicode(path)
 
                    if path in to_index or path not in indexed_paths:
 

	
 
                        # This is either a file that's changed, or a new file
 
                        # that wasn't indexed before. So index it!
 
                        i, iwc = self.add_doc(writer, path, repo, repo_name)
 
                        writer_is_dirty = True
 
                        log.debug('re-indexing %s', path)
 
                        ri_cnt += i
 
                        ri_cnt_total += 1
 
                        riwc_cnt += iwc
 
                        riwc_cnt_total += iwc
 
                log.debug('added %s files %s with content for repo %s',
 
                             ri_cnt + riwc_cnt, riwc_cnt, repo.path
 
                )
 
            log.debug('indexed %s files in total and %s with content',
 
                        ri_cnt_total, riwc_cnt_total
 
            )
 
        finally:
 
            if writer_is_dirty:
 
                log.debug('>> COMMITTING CHANGES TO FILE INDEX <<')
 
                writer.commit(merge=True)
 
                log.debug('>>> FINISHED REBUILDING FILE INDEX <<<')
 
            else:
 
                log.debug('>> NOTHING TO COMMIT TO FILE INDEX <<')
 
                writer.cancel()
 

	
 
    def build_indexes(self):
 
        if os.path.exists(self.index_location):
 
            log.debug('removing previous index')
 
            rmtree(self.index_location)
 

	
 
        if not os.path.exists(self.index_location):
 
            os.mkdir(self.index_location)
 

	
 
        chgset_idx = create_in(self.index_location, CHGSETS_SCHEMA,
 
                               indexname=CHGSET_IDX_NAME)
 
        chgset_idx_writer = chgset_idx.writer()
 

	
 
        file_idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME)
 
        file_idx_writer = file_idx.writer()
 
        log.debug('BUILDING INDEX FOR EXTENSIONS %s '
 
                  'AND REPOS %s', INDEX_EXTENSIONS, self.repo_paths.keys())
 

	
 
        for repo_name, repo in self.repo_paths.items():
 
            # skip indexing if there aren't any revisions
 
            if len(repo) < 1:
 
                continue
 

	
 
            self.index_files(file_idx_writer, repo_name, repo)
 
            self.index_changesets(chgset_idx_writer, repo_name, repo)
 

	
 
        log.debug('>> COMMITTING CHANGES <<')
 
        file_idx_writer.commit(merge=True)
 
        chgset_idx_writer.commit(merge=True)
 
        log.debug('>>> FINISHED BUILDING INDEX <<<')
 

	
 
    def update_indexes(self):
 
        self.update_file_index()
 
        self.update_changeset_index()
 

	
 
    def run(self, full_index=False):
 
        """Run daemon"""
 
        if full_index or self.initial:
 
            self.build_indexes()
 
        else:
 
            self.update_indexes()
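
# Illustrative sketch: a minimal invocation mirroring what the make-index
# paster command below does. Both paths are hypothetical, and a configured
# database session is assumed (the sa argument defaults to the global one).
#
#     from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
#     WhooshIndexingDaemon(index_location='/srv/kallithea/index',
#                          repo_location='/srv/repos').run(full_index=True)
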
kallithea/lib/paster_commands/cache_keys.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.paster_commands.cache_keys
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
cache-keys paster command for Kallithea
 

	
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: mar 27, 2013
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 

	
 
import os
 
import sys
 
import logging
 

	
 
from kallithea.model.meta import Session
 
from kallithea.lib.utils import BasePasterCommand
 
from kallithea.model.db import CacheInvalidation
 

	
 
# Add location of top level folder to sys.path
 
from os.path import dirname as dn
 
rc_path = dn(dn(dn(os.path.realpath(__file__))))
 
sys.path.append(rc_path)
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class Command(BasePasterCommand):
 

	
 
    max_args = 1
 
    min_args = 1
 

	
 
    usage = "CONFIG_FILE"
 
    group_name = "Kallithea"
 
    takes_config_file = -1
 
    parser = BasePasterCommand.standard_parser(verbose=True)
 
    summary = "Cache keys utils"
 

	
 
    def command(self):
 
        #get SqlAlchemy session
 
        self._init_session()
 
        _caches = CacheInvalidation.query().order_by(CacheInvalidation.cache_key).all()
 
        if self.options.show:
 
            for c_obj in _caches:
 
                print 'key:%s active:%s' % (c_obj.cache_key, c_obj.cache_active)
 
        elif self.options.cleanup:
 
            for c_obj in _caches:
 
                Session().delete(c_obj)
 
                print 'removing key:%s' % (c_obj.cache_key)
 
                Session().commit()
 
        else:
 
            print 'nothing done, exiting...'
 
        sys.exit(0)
 

	
 
    def update_parser(self):
 
        self.parser.add_option(
 
            '--show',
 
            action='store_true',
 
            dest='show',
 
            help=("show existing cache keys with together with status")
 
        )
 

	
 
        self.parser.add_option(
 
            '--cleanup',
 
            action="store_true",
 
            dest="cleanup",
 
            help="cleanup existing cache keys"
 
        )
kallithea/lib/paster_commands/cleanup.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.paster_commands.cleanup
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
cleanup-repos paster command for Kallithea
 

	
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jul 14, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 

	
 
import os
 
import sys
 
import re
 
import shutil
 
import logging
 
import datetime
 

	
 
from kallithea.lib.utils import BasePasterCommand, ask_ok, REMOVED_REPO_PAT
 
from kallithea.lib.utils2 import safe_str
 
from kallithea.model.db import Ui
 

	
 
# Add location of top level folder to sys.path
 
from os.path import dirname as dn
 
rc_path = dn(dn(dn(os.path.realpath(__file__))))
 
sys.path.append(rc_path)
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class Command(BasePasterCommand):
 

	
 
    max_args = 1
 
    min_args = 1
 

	
 
    usage = "CONFIG_FILE"
 
    group_name = "Kallithea"
 
    takes_config_file = -1
 
    parser = BasePasterCommand.standard_parser(verbose=True)
 
    summary = "Cleanup deleted repos"
 

	
 
    def _parse_older_than(self, val):
 
        regex = re.compile(r'((?P<days>\d+?)d)?((?P<hours>\d+?)h)?((?P<minutes>\d+?)m)?((?P<seconds>\d+?)s)?')
 
        parts = regex.match(val)
 
        if not parts:
 
            return
 
        parts = parts.groupdict()
 
        time_params = {}
 
        for (name, param) in parts.iteritems():
 
            if param:
 
                time_params[name] = int(param)
 
        return datetime.timedelta(**time_params)
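
# Illustrative sketch (standalone): what _parse_older_than() produces for a
# typical --older-than value.
import re
import datetime

_regex = re.compile(r'((?P<days>\d+?)d)?((?P<hours>\d+?)h)?'
                    r'((?P<minutes>\d+?)m)?((?P<seconds>\d+?)s)?')
_parts = _regex.match('30d12h').groupdict()
_delta = datetime.timedelta(**dict((k, int(v)) for k, v in _parts.items() if v))
assert _delta == datetime.timedelta(days=30, hours=12)
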
 

	
 
    def _extract_date(self, name):
 
        """
 
        Extract the date part from rm__<date> pattern of removed repos,
 
        and convert it to datetime object
 

	
 
        :param name:
 
        """
 
        date_part = name[4:19]  # 4:19 since we don't parse milliseconds
 
        return datetime.datetime.strptime(date_part, '%Y%m%d_%H%M%S')
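
# Illustrative sketch (standalone): removed repos are renamed following an
# rm__<date>_<time>_<microseconds>__<name> pattern; the name below is
# hypothetical. Positions 4:19 cover the '%Y%m%d_%H%M%S' part only.
import datetime

_name = 'rm__20140701_123456_000000__myrepo'
assert (datetime.datetime.strptime(_name[4:19], '%Y%m%d_%H%M%S')
        == datetime.datetime(2014, 7, 1, 12, 34, 56))
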
 

	
 
    def command(self):
 
        #get SqlAlchemy session
 
        self._init_session()
 

	
 
        repos_location = Ui.get_repos_location()
 
        to_remove = []
 
        for dn_, dirs, f in os.walk(safe_str(repos_location)):
 
            alldirs = list(dirs)
 
            del dirs[:]
 
            if ('.hg' in alldirs or
 
                ('objects' in alldirs and ('refs' in alldirs or 'packed-refs' in f))):
 
                continue
 
            for loc in alldirs:
 
                if REMOVED_REPO_PAT.match(loc):
 
                    to_remove.append([os.path.join(dn_, loc),
 
                                      self._extract_date(loc)])
 
                else:
 
                    dirs.append(loc)
 

	
 
        #filter older than (if present)!
 
        now = datetime.datetime.now()
 
        older_than = self.options.older_than
 
        if older_than:
 
            to_remove_filtered = []
 
            older_than_date = self._parse_older_than(older_than)
 
            for name, date_ in to_remove:
 
                repo_age = now - date_
 
                if repo_age > older_than_date:
 
                    to_remove_filtered.append([name, date_])
 

	
 
            to_remove = to_remove_filtered
 
            print >> sys.stdout, 'removing %s deleted repos older than %s (%s)' \
 
                % (len(to_remove), older_than, older_than_date)
 
        else:
 
            print >> sys.stdout, 'removing all [%s] deleted repos' \
 
                % len(to_remove)
 
        if self.options.dont_ask or not to_remove:
 
            # don't ask, just remove!
 
            remove = True
 
        else:
 
            remove = ask_ok('the following repositories will be deleted completely:\n%s\n'
 
                            'are you sure you want to remove them [y/n]?'
 
                            % ', \n'.join(['%s removed on %s'
 
                    % (safe_str(x[0]), safe_str(x[1])) for x in to_remove]))
 

	
 
        if remove:
 
            for path, date_ in to_remove:
 
                print >> sys.stdout, 'removing repository %s' % path
 
                shutil.rmtree(path)
 
        else:
 
            print 'nothing done, exiting...'
 
            sys.exit(0)
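
# Illustrative sketch (standalone): the os.walk pruning idiom used above -
# mutating the dirs list in place controls which directories os.walk
# descends into.
import os

def _walk_skipping(root, skip_name):
    kept = []
    for dn_, dirs, _files in os.walk(root):
        for d in list(dirs):
            if d == skip_name:
                dirs.remove(d)  # pruned: os.walk will not descend into it
            else:
                kept.append(os.path.join(dn_, d))
    return kept
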
 

	
 
    def update_parser(self):
 
        self.parser.add_option(
 
            '--older-than',
 
            action='store',
 
            dest='older_than',
 
            help=("only remove repos that have been removed "
 
                 "at least given time ago. "
 
                 "The default is to remove all removed repositories. "
 
                 "Possible suffixes: "
 
                 "d (days), h (hours), m (minutes), s (seconds). "
 
                 "For example --older-than=30d deletes repositories "
 
                 "removed more than 30 days ago.")
 
            )
 

	
 
        self.parser.add_option(
 
            '--dont-ask',
 
            action="store_true",
 
            dest="dont_ask",
 
            help="remove repositories without asking for confirmation."
 
        )
kallithea/lib/paster_commands/ishell.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.paster_commands.ishell
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
interactive shell paster command for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 4, 2013
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 

	
 
import os
 
import sys
 
import logging
 

	
 
from kallithea.lib.utils import BasePasterCommand
 

	
 
# Add location of top level folder to sys.path
 
from os.path import dirname as dn
 
rc_path = dn(dn(dn(os.path.realpath(__file__))))
 
sys.path.append(rc_path)
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class Command(BasePasterCommand):
 

	
 
    max_args = 1
 
    min_args = 1
 

	
 
    usage = "CONFIG_FILE"
 
    group_name = "Kallithea"
 
    takes_config_file = -1
 
    parser = BasePasterCommand.standard_parser(verbose=True)
 
    summary = "Interactive shell"
 

	
 
    def command(self):
 
        #get SqlAlchemy session
 
        self._init_session()
 

	
 
        # imports, used in ipython shell
 
        import os
 
        import sys
 
        import time
 
        import shutil
 
        import datetime
 
        from kallithea.model.db import *
 

	
 
        try:
 
            from IPython import embed
 
            from IPython.config.loader import Config
 
            cfg = Config()
 
            cfg.InteractiveShellEmbed.confirm_exit = False
 
            embed(config=cfg, banner1="Kallithea IShell.")
 
        except ImportError:
 
            print 'ipython installation required for ishell'
 
            sys.exit(-1)
 

	
 
    def update_parser(self):
 
        pass
kallithea/lib/paster_commands/make_index.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.paster_commands.make_index
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
make-index paster command for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Aug 17, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 

	
 
"""
 

	
 
from __future__ import with_statement
 

	
 
import os
 
import sys
 
import logging
 

	
 
from string import strip
 
from kallithea.model.repo import RepoModel
 
from kallithea.lib.utils import BasePasterCommand, load_rcextensions
 

	
 
# Add location of top level folder to sys.path
 
from os.path import dirname as dn
 
rc_path = dn(dn(dn(os.path.realpath(__file__))))
 
sys.path.append(rc_path)
 

	
 

	
 
class Command(BasePasterCommand):
 

	
 
    max_args = 1
 
    min_args = 1
 

	
 
    usage = "CONFIG_FILE"
 
    group_name = "Kallithea"
 
    takes_config_file = -1
 
    parser = BasePasterCommand.standard_parser(verbose=True)
 
    summary = "Creates or updates full text search index"
 

	
 
    def command(self):
 
        logging.config.fileConfig(self.path_to_ini_file)
 
        #get SqlAlchemy session
 
        self._init_session()
 
        from pylons import config
 
        index_location = config['index_dir']
 
        load_rcextensions(config['here'])
 

	
 
        repo_location = self.options.repo_location \
 
            if self.options.repo_location else RepoModel().repos_path
 
        repo_list = map(strip, self.options.repo_list.split(',')) \
 
            if self.options.repo_list else None
 

	
 
        repo_update_list = map(strip, self.options.repo_update_list.split(',')) \
 
            if self.options.repo_update_list else None
 

	
 
        #======================================================================
 
        # WHOOSH DAEMON
 
        #======================================================================
 
        from kallithea.lib.pidlock import LockHeld, DaemonLock
 
        from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
 
        try:
 
            l = DaemonLock(file_=os.path.join(dn(dn(index_location)),
 
                                              'make_index.lock'))
 
            WhooshIndexingDaemon(index_location=index_location,
 
                                 repo_location=repo_location,
 
                                 repo_list=repo_list,
 
                                 repo_update_list=repo_update_list)\
 
                .run(full_index=self.options.full_index)
 
            l.release()
 
        except LockHeld:
 
            sys.exit(1)
 

	
 
    def update_parser(self):
 
        self.parser.add_option('--repo-location',
 
                          action='store',
 
                          dest='repo_location',
 
                          help="Specifies repositories location to index OPTIONAL",
 
                          )
 
        self.parser.add_option('--index-only',
 
                          action='store',
 
                          dest='repo_list',
 
                          help="Specifies a comma separated list of repositories "
 
                                "to build index on. If not given all repositories "
 
                                "are scanned for indexing. OPTIONAL",
 
                          )
 
        self.parser.add_option('--update-only',
 
                          action='store',
 
                          dest='repo_update_list',
 
                          help="Specifies a comma separated list of repositories "
 
                                "to re-build index on. OPTIONAL",
 
                          )
 
        self.parser.add_option('-f',
 
                          action='store_true',
 
                          dest='full_index',
 
                          help="Specifies that index should be made full i.e"
 
                                " destroy old and build from scratch",
 
                          default=False)
kallithea/lib/paster_commands/make_rcextensions.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.paster_commands.make_rcextensions
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
make-rcext paster command for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Mar 6, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 

	
 
"""
 

	
 
from __future__ import with_statement
 

	
 
import os
 
import sys
 
import pkg_resources
 

	
 
from kallithea.lib.utils import BasePasterCommand, ask_ok
 

	
 
# Add location of top level folder to sys.path
 
from os.path import dirname as dn
 
rc_path = dn(dn(dn(os.path.realpath(__file__))))
 
sys.path.append(rc_path)
 

	
 

	
 
class Command(BasePasterCommand):
 

	
 
    max_args = 1
 
    min_args = 1
 

	
 
    group_name = "Kallithea"
 
    takes_config_file = -1
 
    parser = BasePasterCommand.standard_parser(verbose=True)
 
    summary = "Write template file for extending Kallithea in Python."
 
    usage = "CONFIG_FILE"
 
    description = '''\
 
        A rcextensions directory with a __init__.py file will be created next to
 
        the ini file. Local customizations in that file will survive upgrades.
 
        The file contains instructions on how it can be customized.
 
        '''
 

	
 
    def command(self):
 
        from pylons import config
 

	
 
        here = config['here']
 
        content = pkg_resources.resource_string(
 
            'kallithea', os.path.join('config', 'rcextensions', '__init__.py')
 
        )
 
        ext_file = os.path.join(here, 'rcextensions', '__init__.py')
 
        if os.path.exists(ext_file):
 
            msg = ('Extension file already exists, do you want '
 
                   'to overwrite it ? [y/n]')
 
            if not ask_ok(msg):
 
                print 'Nothing done...'
 
                return
 

	
 
        dirname = os.path.dirname(ext_file)
 
        if not os.path.isdir(dirname):
 
            os.makedirs(dirname)
 
        with open(ext_file, 'wb') as f:
 
            f.write(content)
 
            print 'Wrote new extensions file to %s' % ext_file
 

	
 
    def update_parser(self):
 
        pass
kallithea/lib/paster_commands/repo_scan.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.paster_commands.repo_scan
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
repo-scan paster command for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Feb 9, 2013
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 

	
 
import os
 
import sys
 
import logging
 

	
 
from kallithea.model.scm import ScmModel
 
from kallithea.lib.utils import BasePasterCommand, repo2db_mapper
 

	
 
# Add location of top level folder to sys.path
 
from os.path import dirname as dn
 
rc_path = dn(dn(dn(os.path.realpath(__file__))))
 
sys.path.append(rc_path)
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class Command(BasePasterCommand):
 

	
 
    max_args = 1
 
    min_args = 1
 

	
 
    usage = "CONFIG_FILE"
 
    group_name = "Kallithea"
 
    takes_config_file = -1
 
    parser = BasePasterCommand.standard_parser(verbose=True)
 
    summary = "Rescan default location for new repositories"
 

	
 
    def command(self):
 
        #get SqlAlchemy session
 
        self._init_session()
 
        rm_obsolete = self.options.delete_obsolete
 
        log.info('Now scanning root location for new repos...')
 
        added, removed = repo2db_mapper(ScmModel().repo_scan(),
 
                                        remove_obsolete=rm_obsolete)
 
        added = ', '.join(added) or '-'
 
        removed = ', '.join(removed) or '-'
 
        log.info('Scan completed added: %s removed: %s', added, removed)
 

	
 
    def update_parser(self):
 
        self.parser.add_option(
 
            '--delete-obsolete',
 
            action='store_true',
 
            help="Use this flag do delete repositories that are "
 
                 "present in Kallithea database but not on the filesystem",
 
        )
kallithea/lib/paster_commands/update_repoinfo.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.paster_commands.update_repoinfo
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
update-repoinfo paster command for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jul 14, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 

	
 
import os
 
import sys
 
import logging
 
import string
 

	
 
from kallithea.lib.utils import BasePasterCommand
 
from kallithea.model.db import Repository
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.meta import Session
 

	
 
# Add location of top level folder to sys.path
 
from os.path import dirname as dn
 
rc_path = dn(dn(dn(os.path.realpath(__file__))))
 
sys.path.append(rc_path)
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class Command(BasePasterCommand):
 

	
 
    max_args = 1
 
    min_args = 1
 

	
 
    usage = "CONFIG_FILE"
 
    group_name = "Kallithea"
 
    takes_config_file = -1
 
    parser = BasePasterCommand.standard_parser(verbose=True)
 
    summary = "Updates repositories caches for last changeset"
 

	
 
    def command(self):
 
        #get SqlAlchemy session
 
        self._init_session()
 

	
 
        repo_update_list = map(string.strip,
 
                               self.options.repo_update_list.split(',')) \
 
                               if self.options.repo_update_list else None
 

	
 
        if repo_update_list:
 
            repo_list = list(Repository.query()\
 
                .filter(Repository.repo_name.in_(repo_update_list)))
 
        else:
 
            repo_list = Repository.getAll()
 
        RepoModel.update_repoinfo(repositories=repo_list)
 
        Session().commit()
 

	
 
        if self.options.invalidate_cache:
 
            for r in repo_list:
 
                r.set_invalidate()
 
        print 'Updated cache for %s repositories' % (len(repo_list))
 

	
 
    def update_parser(self):
 
        self.parser.add_option('--update-only',
 
                           action='store',
 
                           dest='repo_update_list',
 
                           help="Specifies a comma separated list of repositories "
 
                                "to update last commit info for. OPTIONAL")
 
        self.parser.add_option('--invalidate-cache',
 
                           action='store_true',
 
                           dest='invalidate_cache',
 
                           help="Trigger cache invalidation event for repos. "
 
                                "OPTIONAL")
kallithea/lib/pidlock.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
from __future__ import with_statement
 
import os
 
import errno
 

	
 
from multiprocessing.util import Finalize
 

	
 
from kallithea.lib.compat import kill
 

	
 

	
 
class LockHeld(Exception):
 
    pass
 

	
 

	
 
class DaemonLock(object):
 
    """daemon locking
 
    USAGE:
 
    try:
 
        l = DaemonLock(file_='/path/to/lockfile', desc='test lock')
 
        main()
 
        l.release()
 
    except LockHeld:
 
        sys.exit(1)
 
    """
 

	
 
    def __init__(self, file_=None, callbackfn=None,
 
                 desc='daemon lock', debug=False):
 

	
 
        lock_name = os.path.join(os.path.dirname(__file__), 'running.lock')
 
        self.pidfile = file_ if file_ else lock_name
 
        self.callbackfn = callbackfn
 
        self.desc = desc
 
        self.debug = debug
 
        self.held = False
 
        #run the lock automatically !
 
        self.lock()
 
        self._finalize = Finalize(self, DaemonLock._on_finalize,
 
                                  args=(self, debug), exitpriority=10)
 

	
 
    @staticmethod
 
    def _on_finalize(lock, debug):
 
        if lock.held:
 
            if debug:
 
                print 'lock held finalizing and running lock.release()'
 
            lock.release()
 

	
 
    def lock(self):
 
        """
 
        locking function; if a lock is already held it

        will raise a LockHeld exception
 
        """
 
        lockname = str(os.getpid())
 
        if self.debug:
 
            print 'running lock'
 
        self.trylock()
 
        self.makelock(lockname, self.pidfile)
 
        return True
 

	
 
    def trylock(self):
 
        running_pid = False
 
        if self.debug:
 
            print 'checking for already running process'
 
        try:
 
            with open(self.pidfile, 'r') as f:
 
                try:
 
                    running_pid = int(f.readline())
 
                except ValueError:
 
                    running_pid = -1
 

	
 
            if self.debug:
 
                print ('lock file present running_pid: %s, '
 
                       'checking for execution' % (running_pid,))
 
            # Now we check the PID from lock file matches to the current
 
            # process PID
 
            if running_pid:
 
                try:
 
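                    # signal 0 delivers no signal; it only checks that the
                    # process exists and that we are allowed to signal it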
                    kill(running_pid, 0)
 
                except OSError as exc:
 
                    if exc.errno in (errno.ESRCH, errno.EPERM):
 
                        print ("Lock File is there but"
 
                               " the program is not running")
 
                        print "Removing lock file for the: %s" % running_pid
 
                        self.release()
 
                    else:
 
                        raise
 
                else:
 
                    print "You already have an instance of the program running"
 
                    print "It is running as process %s" % running_pid
 
                    raise LockHeld()
 

	
 
        except IOError as e:
 
            if e.errno != errno.ENOENT:
 
                raise
 

	
 
    def release(self):
 
        """releases the pid by removing the pidfile
 
        """
 
        if self.debug:
 
            print 'trying to release the pidlock'
 

	
 
        if self.callbackfn:
 
            #execute callback function on release
 
            if self.debug:
 
                print 'executing callback function %s' % self.callbackfn
 
            self.callbackfn()
 
        try:
 
            if self.debug:
 
                print 'removing pidfile %s' % self.pidfile
 
            os.remove(self.pidfile)
 
            self.held = False
 
        except OSError as e:
 
            if self.debug:
 
                print 'removing pidfile failed %s' % e
 
            pass
 

	
 
    def makelock(self, lockname, pidfile):
 
        """
 
        this function will make an actual lock
 

	
 
        :param lockname: the PID (as a string) to write into the lock file
 
        :param pidfile: the file to write the pid in
 
        """
 
        if self.debug:
 
            print 'creating a file %s and pid: %s' % (pidfile, lockname)
 

	
 
        dir_, file_ = os.path.split(pidfile)
 
        if not os.path.isdir(dir_):
 
            os.makedirs(dir_)
 
        with open(self.pidfile, 'wb') as f:
 
            f.write(lockname)
 
        self.held = True
kallithea/lib/profiler.py
 
from __future__ import with_statement
 

	
 
import objgraph
 
import cProfile
 
import pstats
 
import cgi
 
import pprint
 
import threading
 

	
 
from StringIO import StringIO
 

	
 

	
 
class ProfilingMiddleware(object):
 
    def __init__(self, app):
 
        self.lock = threading.Lock()
 
        self.app = app
 

	
 
    def __call__(self, environ, start_response):
 
        with self.lock:
 
            profiler = cProfile.Profile()
 

	
 
            def run_app(*a, **kw):
 
                self.response = self.app(environ, start_response)
 

	
 
            profiler.runcall(run_app, environ, start_response)
 

	
 
            profiler.snapshot_stats()
 

	
 
            stats = pstats.Stats(profiler)
 
            stats.sort_stats('calls')  # sort by call count; 'cumulative' sorts by cumulative time
 

	
 
            # Redirect output
 
            out = StringIO()
 
            stats.stream = out
 

	
 
            stats.print_stats()
 

	
 
            resp = ''.join(self.response)
 

	
 
            # Let's at least only put this on HTML-like responses.
 
            if resp.strip().startswith('<'):
 
                ## The profiling info is just appended to the response.
 
                ##  Browsers don't mind this.
 
                resp += ('<pre style="text-align:left; '
 
                         'border-top: 4px dashed red; padding: 1em;">')
 
                resp += cgi.escape(out.getvalue(), True)
 

	
 
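                # NOTE: show_most_common_types() prints its table to stdout
                # and returns None, so the '---' fallback below is what
                # normally ends up in the response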
                ct = objgraph.show_most_common_types()
 
                print ct
 

	
 
                resp += ct if ct else '---'
 

	
 
                output = StringIO()
 
                pprint.pprint(environ, output, depth=3)
 

	
 
                resp += cgi.escape(output.getvalue(), True)
 
                resp += '</pre>'
 

	
 
            return resp
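
# Usage sketch (hypothetical; `make_app` stands in for whatever builds the
# WSGI application in a given deployment):
#
#     app = make_app()
#     app = ProfilingMiddleware(app)
#
# Note that the lock serializes all requests while profiling, so this
# middleware is only suitable for debugging, never for production traffic.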
kallithea/model/api_key.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.api_key
 
~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
API key model for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Sep 8, 2013
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 
import time
 
import logging
 
from sqlalchemy import or_
 

	
 
from kallithea.lib.utils2 import generate_api_key
 
from kallithea.model import BaseModel
 
from kallithea.model.db import UserApiKeys
 
from kallithea.model.meta import Session
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class ApiKeyModel(BaseModel):
 
    cls = UserApiKeys
 

	
 
    def create(self, user, description, lifetime=-1):
 
        """
 
        :param user: user or user_id
 
        :param description: description of ApiKey
 
        :param lifetime: key lifetime in minutes; -1 means the key never expires
 
        """
 
        user = self._get_user(user)
 

	
 
        new_api_key = UserApiKeys()
 
        new_api_key.api_key = generate_api_key()
 
        new_api_key.user_id = user.user_id
 
        new_api_key.description = description
 
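        # `lifetime` is given in minutes; store an absolute unix timestamp,
        # or keep -1 for keys that never expire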
        new_api_key.expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
 
        Session().add(new_api_key)
 

	
 
        return new_api_key
 

	
 
    def delete(self, api_key, user=None):
 
        """
 
        Deletes the given api_key; if user is set, the lookup is additionally

        filtered by that user.
 
        """
 
        api_key = UserApiKeys.query().filter(UserApiKeys.api_key == api_key)
 

	
 
        if user is not None:
 
            user = self._get_user(user)
 
            api_key = api_key.filter(UserApiKeys.user_id == user.user_id)
 

	
 
        api_key = api_key.scalar()
 
        Session().delete(api_key)
 

	
 
    def get_api_keys(self, user, show_expired=True):
 
        user = self._get_user(user)
 
        user_api_keys = UserApiKeys.query()\
 
            .filter(UserApiKeys.user_id == user.user_id)
 
        if not show_expired:
 
            user_api_keys = user_api_keys\
 
                .filter(or_(UserApiKeys.expires == -1,
 
                            UserApiKeys.expires >= time.time()))
 
        return user_api_keys
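
# Usage sketch (hypothetical values; Session comes from kallithea.model.meta):
#
#     model = ApiKeyModel()
#     key = model.create(user, u'continuous integration token',
#                        lifetime=60)  # key valid for one hour
#     Session().commit()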
kallithea/model/gist.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.gist
 
~~~~~~~~~~~~~~~~~~~~
 

	
 
gist model for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: May 9, 2013
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 
import os
 
import time
 
import logging
 
import traceback
 
import shutil
 

	
 
from kallithea.lib.utils2 import safe_unicode, unique_id, safe_int, \
 
    time_to_datetime, AttributeDict
 
from kallithea.lib.compat import json
 
from kallithea.model import BaseModel
 
from kallithea.model.db import Gist
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.scm import ScmModel
 

	
 
log = logging.getLogger(__name__)
 

	
 
GIST_STORE_LOC = '.rc_gist_store'
 
GIST_METADATA_FILE = '.rc_gist_metadata'
 

	
 

	
 
class GistModel(BaseModel):
 
    cls = Gist
 

	
 
    def _get_gist(self, gist):
 
        """
 
        Helper method to get gist by ID, or gist_access_id as a fallback
 

	
 
        :param gist: GistID, gist_access_id, or Gist instance
 
        """
 
        return self._get_instance(Gist, gist, callback=Gist.get_by_access_id)
 

	
 
    def __delete_gist(self, gist):
 
        """
 
        removes gist from filesystem
 

	
 
        :param gist: gist object
 
        """
 
        root_path = RepoModel().repos_path
 
        rm_path = os.path.join(root_path, GIST_STORE_LOC, gist.gist_access_id)
 
        log.info("Removing %s", rm_path)
 
        shutil.rmtree(rm_path)
 

	
 
    def _store_metadata(self, repo, gist_id, gist_access_id, user_id, gist_type,
 
                        gist_expires):
 
        """
 
        Store metadata inside the gist; it can later be used for imports

        or gist identification
 
        """
 
        metadata = {
 
            'metadata_version': '1',
 
            'gist_db_id': gist_id,
 
            'gist_access_id': gist_access_id,
 
            'gist_owner_id': user_id,
 
            'gist_type': gist_type,
 
            'gist_expires': gist_expires,
 
            'gist_updated': time.time(),
 
        }
 
        with open(os.path.join(repo.path, '.hg', GIST_METADATA_FILE), 'wb') as f:
 
            f.write(json.dumps(metadata))
 

	
 
    def get_gist(self, gist):
 
        return self._get_gist(gist)
 

	
 
    def get_gist_files(self, gist_access_id, revision=None):
 
        """
 
        Get files for given gist
 

	
 
        :param gist_access_id:
 
        """
 
        repo = Gist.get_by_access_id(gist_access_id)
 
        cs = repo.scm_instance.get_changeset(revision)
 
        return cs, [n for n in cs.get_node('/')]
 

	
 
    def create(self, description, owner, gist_mapping,
 
               gist_type=Gist.GIST_PUBLIC, lifetime=-1):
 
        """
 

	
 
        :param description: description of the gist
 
        :param owner: user who created this gist
 
        :param gist_mapping: mapping {filename:{'content':content},...}
 
        :param gist_type: type of gist private/public
 
        :param lifetime: in minutes, -1 == forever
 
        """
 
        owner = self._get_user(owner)
 
        gist_id = safe_unicode(unique_id(20))
 
        lifetime = safe_int(lifetime, -1)
 
        gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
 
        log.debug('set GIST expiration date to: %s',
 
                  time_to_datetime(gist_expires)
 
                   if gist_expires != -1 else 'forever')
 
        #create the Database version
 
        gist = Gist()
 
        gist.gist_description = description
 
        gist.gist_access_id = gist_id
 
        gist.gist_owner = owner.user_id
 
        gist.gist_expires = gist_expires
 
        gist.gist_type = safe_unicode(gist_type)
 
        self.sa.add(gist)
 
        self.sa.flush()
 
        if gist_type == Gist.GIST_PUBLIC:
 
            # use the DB ID as an easier-to-use GIST ID
 
            gist_id = safe_unicode(gist.gist_id)
 
            gist.gist_access_id = gist_id
 
            self.sa.add(gist)
 

	
 
        gist_repo_path = os.path.join(GIST_STORE_LOC, gist_id)
 
        log.debug('Creating new %s GIST repo in %s', gist_type, gist_repo_path)
 
        repo = RepoModel()._create_filesystem_repo(
 
            repo_name=gist_id, repo_type='hg', repo_group=GIST_STORE_LOC)
 

	
 
        processed_mapping = {}
 
        for filename in gist_mapping:
 
            if filename != os.path.basename(filename):
 
                raise Exception('Filename cannot be inside a directory')
 

	
 
            content = gist_mapping[filename]['content']
 
            #TODO: expand support for setting explicit lexers
 
#             if lexer is None:
 
#                 try:
 
#                     guess_lexer = pygments.lexers.guess_lexer_for_filename
 
#                     lexer = guess_lexer(filename,content)
 
#                 except pygments.util.ClassNotFound:
 
#                     lexer = 'text'
 
            processed_mapping[filename] = {'content': content}
 

	
 
        # now create single multifile commit
 
        message = 'added file'
 
        message += 's: ' if len(processed_mapping) > 1 else ': '
 
        message += ', '.join([x for x in processed_mapping])
 

	
 
        #fake Kallithea Repository object
 
        fake_repo = AttributeDict(dict(
 
            repo_name=gist_repo_path,
 
            scm_instance_no_cache=lambda: repo,
 
        ))
 
        ScmModel().create_nodes(
 
            user=owner.user_id, repo=fake_repo,
 
            message=message,
 
            nodes=processed_mapping,
 
            trigger_push_hook=False
 
        )
 

	
 
        self._store_metadata(repo, gist.gist_id, gist.gist_access_id,
 
                             owner.user_id, gist.gist_type, gist.gist_expires)
 
        return gist
 

	
 
    def delete(self, gist, fs_remove=True):
 
        gist = self._get_gist(gist)
 
        try:
 
            self.sa.delete(gist)
 
            if fs_remove:
 
                self.__delete_gist(gist)
 
            else:
 
                log.debug('skipping removal from filesystem')
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def update(self, gist, description, owner, gist_mapping, gist_type,
 
               lifetime):
 
        gist = self._get_gist(gist)
 
        gist_repo = gist.scm_instance
 

	
 
        lifetime = safe_int(lifetime, -1)
 
        if lifetime == 0:  # preserve old value
 
            gist_expires = gist.gist_expires
 
        else:
 
            gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
 

	
 
        #calculate operation type based on given data
 
        gist_mapping_op = {}
 
        for k, v in gist_mapping.items():
 
            # add, mod, del
 
            if not v['org_filename'] and v['filename']:
 
                op = 'add'
 
            elif v['org_filename'] and not v['filename']:
 
                op = 'del'
 
            else:
 
                op = 'mod'
 

	
 
            v['op'] = op
 
            gist_mapping_op[k] = v
 

	
 
        gist.gist_description = description
 
        gist.gist_expires = gist_expires
 
        gist.owner = owner
 
        gist.gist_type = gist_type
 
        self.sa.add(gist)
 
        self.sa.flush()
 

	
 
        message = 'updated file'
 
        message += 's: ' if len(gist_mapping) > 1 else ': '
 
        message += ', '.join([x for x in gist_mapping])
 

	
 
        #fake Kallithea Repository object
 
        fake_repo = AttributeDict(dict(
 
            repo_name=gist_repo.path,
 
            scm_instance_no_cache=lambda: gist_repo,
 
        ))
 

	
 
        self._store_metadata(gist_repo, gist.gist_id, gist.gist_access_id,
 
                             owner.user_id, gist.gist_type, gist.gist_expires)
 

	
 
        ScmModel().update_nodes(
 
            user=owner.user_id,
 
            repo=fake_repo,
 
            message=message,
 
            nodes=gist_mapping_op,
 
            trigger_push_hook=False
 
        )
 

	
 
        return gist
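
# Usage sketch for GistModel.create (hypothetical values; Session comes from
# kallithea.model.meta):
#
#     gist = GistModel().create(
#         description=u'example snippet',
#         owner=user,
#         gist_mapping={'snippet.py': {'content': u'print "hello"'}},
#         gist_type=Gist.GIST_PUBLIC,
#         lifetime=60,  # minutes; -1 means the gist never expires
#     )
#     Session().commit()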
kallithea/model/repo.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.repo
 
~~~~~~~~~~~~~~~~~~~~
 

	
 
Repository model for kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jun 5, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 

	
 
"""
 

	
 
from __future__ import with_statement
 
import os
 
import shutil
 
import logging
 
import traceback
 
from datetime import datetime
 
from sqlalchemy.orm import subqueryload
 

	
 
from kallithea.lib.utils import make_ui
 
from kallithea.lib.vcs.backends import get_backend
 
from kallithea.lib.compat import json
 
from kallithea.lib.utils2 import LazyProperty, safe_str, safe_unicode, \
 
    remove_prefix, obfuscate_url_pw, get_current_authuser
 
from kallithea.lib.caching_query import FromCache
 
from kallithea.lib.hooks import log_delete_repository
 

	
 
from kallithea.model import BaseModel
 
from kallithea.model.db import Repository, UserRepoToPerm, UserGroupRepoToPerm, \
 
    UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission, \
 
    Statistics, UserGroup, Ui, RepoGroup, RepositoryField
 

	
 
from kallithea.lib import helpers as h
 
from kallithea.lib.auth import HasRepoPermissionAny, HasUserGroupPermissionAny
 
from kallithea.lib.exceptions import AttachedForksError
 
from kallithea.model.scm import UserGroupList
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class RepoModel(BaseModel):
 

	
 
    cls = Repository
 
    URL_SEPARATOR = Repository.url_sep()
 

	
 
    def _get_user_group(self, users_group):
 
        return self._get_instance(UserGroup, users_group,
 
                                  callback=UserGroup.get_by_group_name)
 

	
 
    def _get_repo_group(self, repo_group):
 
        return self._get_instance(RepoGroup, repo_group,
 
                                  callback=RepoGroup.get_by_group_name)
 

	
 
    def _create_default_perms(self, repository, private):
 
        # create default permission
 
        default = 'repository.read'
 
        def_user = User.get_default_user()
 
        for p in def_user.user_perms:
 
            if p.permission.permission_name.startswith('repository.'):
 
                default = p.permission.permission_name
 
                break
 

	
 
        default_perm = 'repository.none' if private else default
 

	
 
        repo_to_perm = UserRepoToPerm()
 
        repo_to_perm.permission = Permission.get_by_key(default_perm)
 

	
 
        repo_to_perm.repository = repository
 
        repo_to_perm.user_id = def_user.user_id
 

	
 
        return repo_to_perm
 

	
 
    @LazyProperty
 
    def repos_path(self):
 
        """
 
        Gets the repositories root path from database
 
        """
 

	
 
        q = self.sa.query(Ui).filter(Ui.ui_key == '/').one()
 
        return q.ui_value
 

	
 
    def get(self, repo_id, cache=False):
 
        repo = self.sa.query(Repository) \
 
            .filter(Repository.repo_id == repo_id)
 

	
 
        if cache:
 
            repo = repo.options(FromCache("sql_cache_short",
 
                                          "get_repo_%s" % repo_id))
 
        return repo.scalar()
 

	
 
    def get_repo(self, repository):
 
        return self._get_repo(repository)
 

	
 
    def get_by_repo_name(self, repo_name, cache=False):
 
        repo = self.sa.query(Repository) \
 
            .filter(Repository.repo_name == repo_name)
 

	
 
        if cache:
 
            repo = repo.options(FromCache("sql_cache_short",
 
                                          "get_repo_%s" % repo_name))
 
        return repo.scalar()
 

	
 
    def get_all_user_repos(self, user):
 
        """
 
        Gets all repositories to which the given user has at least read access
 

	
 
        :param user:
 
        """
 
        from kallithea.lib.auth import AuthUser
 
        user = self._get_user(user)
 
        repos = AuthUser(user_id=user.user_id).permissions['repositories']
 
        access_check = lambda r: r[1] in ['repository.read',
 
                                          'repository.write',
 
                                          'repository.admin']
 
        repos = [x[0] for x in filter(access_check, repos.items())]
 
        return Repository.query().filter(Repository.repo_name.in_(repos))
 

	
 
    def get_users_js(self):
 
        users = self.sa.query(User).filter(User.active == True).all()
 
        return json.dumps([
 
            {
 
                'id': u.user_id,
 
                'fname': h.escape(u.name),
 
                'lname': h.escape(u.lastname),
 
                'nname': u.username,
 
                'gravatar_lnk': h.gravatar_url(u.email, size=28),
 
                'gravatar_size': 14,
 
            } for u in users]
 
        )
 

	
 
    def get_user_groups_js(self):
 
        user_groups = self.sa.query(UserGroup) \
 
            .filter(UserGroup.users_group_active == True) \
 
            .options(subqueryload(UserGroup.members)) \
 
            .all()
 
        user_groups = UserGroupList(user_groups, perm_set=['usergroup.read',
 
                                                           'usergroup.write',
 
                                                           'usergroup.admin'])
 
        return json.dumps([
 
            {
 
                'id': gr.users_group_id,
 
                'grname': gr.users_group_name,
 
                'grmembers': len(gr.members),
 
            } for gr in user_groups]
 
        )
 

	
 
    @classmethod
 
    def _render_datatable(cls, tmpl, *args, **kwargs):
 
        import kallithea
 
        from pylons import tmpl_context as c
 
        from pylons.i18n.translation import _
 

	
 
        _tmpl_lookup = kallithea.CONFIG['pylons.app_globals'].mako_lookup
 
        template = _tmpl_lookup.get_template('data_table/_dt_elements.html')
 

	
 
        tmpl = template.get_def(tmpl)
 
        kwargs.update(dict(_=_, h=h, c=c))
 
        return tmpl.render(*args, **kwargs)
 

	
 
    @classmethod
 
    def update_repoinfo(cls, repositories=None):
 
        if not repositories:
 
            repositories = Repository.getAll()
 
        for repo in repositories:
 
            repo.update_changeset_cache()
 

	
 
    def get_repos_as_dict(self, repos_list=None, admin=False, perm_check=True,
 
                          super_user_actions=False):
 
        _render = self._render_datatable
 
        from pylons import tmpl_context as c
 

	
 
        def quick_menu(repo_name):
 
            return _render('quick_menu', repo_name)
 

	
 
        def repo_lnk(name, rtype, rstate, private, fork_of):
 
            return _render('repo_name', name, rtype, rstate, private, fork_of,
 
                           short_name=not admin, admin=False)
 

	
 
        def last_change(last_change):
 
            return _render("last_change", last_change)
 

	
 
        def rss_lnk(repo_name):
 
            return _render("rss", repo_name)
 

	
 
        def atom_lnk(repo_name):
 
            return _render("atom", repo_name)
 

	
 
        def last_rev(repo_name, cs_cache):
 
            return _render('revision', repo_name, cs_cache.get('revision'),
 
                           cs_cache.get('raw_id'), cs_cache.get('author'),
 
                           cs_cache.get('message'))
 

	
 
        def desc(desc):
 
            return h.urlify_text(desc, truncate=60, stylize=c.visual.stylify_metatags)
 

	
 
        def state(repo_state):
 
            return _render("repo_state", repo_state)
 

	
 
        def repo_actions(repo_name):
 
            return _render('repo_actions', repo_name, super_user_actions)
 

	
 
        def owner_actions(user_id, username):
 
            return _render('user_name', user_id, username)
 

	
 
        repos_data = []
 
        for repo in repos_list:
 
            if perm_check:
 
                # check permission at this level
 
                if not HasRepoPermissionAny(
 
                        'repository.read', 'repository.write',
 
                        'repository.admin'
 
                )(repo.repo_name, 'get_repos_as_dict check'):
 
                    continue
 
            cs_cache = repo.changeset_cache
 
            row = {
 
                "menu": quick_menu(repo.repo_name),
 
                "raw_name": repo.repo_name.lower(),
 
                "name": repo_lnk(repo.repo_name, repo.repo_type,
 
                                 repo.repo_state, repo.private, repo.fork),
 
                "last_change": last_change(repo.last_db_change),
 
                "last_changeset": last_rev(repo.repo_name, cs_cache),
 
                "last_rev_raw": cs_cache.get('revision'),
 
                "desc": desc(repo.description),
 
                "owner": h.person(repo.user),
 
                "state": state(repo.repo_state),
 
                "rss": rss_lnk(repo.repo_name),
 
                "atom": atom_lnk(repo.repo_name),
 

	
 
            }
 
            if admin:
 
                row.update({
 
                    "action": repo_actions(repo.repo_name),
 
                    "owner": owner_actions(repo.user.user_id,
 
                                           h.person(repo.user))
 
                })
 
            repos_data.append(row)
 

	
 
        return {
 
            "totalRecords": len(repos_list),
 
            "startIndex": 0,
 
            "sort": "name",
 
            "dir": "asc",
 
            "records": repos_data
 
        }
 

	
 
    def _get_defaults(self, repo_name):
 
        """
 
        Gets information about a repository and returns a dict for
 
        usage in forms
 

	
 
        :param repo_name:
 
        """
 

	
 
        repo_info = Repository.get_by_repo_name(repo_name)
 

	
 
        if repo_info is None:
 
            return None
 

	
 
        defaults = repo_info.get_dict()
 
        group, repo_name, repo_name_full = repo_info.groups_and_repo
 
        defaults['repo_name'] = repo_name
 
        defaults['repo_group'] = getattr(group[-1] if group else None,
 
                                         'group_id', None)
 

	
 
        for strip, k in [(0, 'repo_type'), (1, 'repo_enable_downloads'),
 
                         (1, 'repo_description'), (1, 'repo_enable_locking'),
 
                         (1, 'repo_landing_rev'), (0, 'clone_uri'),
 
                         (1, 'repo_private'), (1, 'repo_enable_statistics')]:
 
            attr = k
 
            if strip:
 
                attr = remove_prefix(k, 'repo_')
 

	
 
            val = defaults[attr]
 
            if k == 'repo_landing_rev':
 
                val = ':'.join(defaults[attr])
 
            defaults[k] = val
 
            if k == 'clone_uri':
 
                defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
 

	
 
        # fill owner
 
        if repo_info.user:
 
            defaults.update({'user': repo_info.user.username})
 
        else:
 
            replacement_user = User.query().filter(User.admin ==
 
                                                   True).first().username
 
            defaults.update({'user': replacement_user})
 

	
 
        # fill repository users
 
        for p in repo_info.repo_to_perm:
 
            defaults.update({'u_perm_%s' % p.user.username:
 
                                 p.permission.permission_name})
 

	
 
        # fill repository groups
 
        for p in repo_info.users_group_to_perm:
 
            defaults.update({'g_perm_%s' % p.users_group.users_group_name:
 
                                 p.permission.permission_name})
 

	
 
        return defaults
 

	
 
    def update(self, repo, **kwargs):
 
        try:
 
            cur_repo = self._get_repo(repo)
 
            org_repo_name = cur_repo.repo_name
 
            if 'user' in kwargs:
 
                cur_repo.user = User.get_by_username(kwargs['user'])
 

	
 
            if 'repo_group' in kwargs:
 
                cur_repo.group = RepoGroup.get(kwargs['repo_group'])
 
            log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
 
            for k in ['repo_enable_downloads',
 
                      'repo_description',
 
                      'repo_enable_locking',
 
                      'repo_landing_rev',
 
                      'repo_private',
 
                      'repo_enable_statistics',
 
                      ]:
 
                if k in kwargs:
 
                    setattr(cur_repo, remove_prefix(k, 'repo_'), kwargs[k])
 
            clone_uri = kwargs.get('clone_uri')
 
            if clone_uri is not None and clone_uri != cur_repo.clone_uri_hidden:
 
                cur_repo.clone_uri = clone_uri
 

	
 
            new_name = cur_repo.get_new_name(kwargs['repo_name'])
 
            cur_repo.repo_name = new_name
 
            #if private flag is set, reset default permission to NONE
 

	
 
            if kwargs.get('repo_private'):
 
                EMPTY_PERM = 'repository.none'
 
                RepoModel().grant_user_permission(
 
                    repo=cur_repo, user='default', perm=EMPTY_PERM
 
                )
 
                #handle extra fields
 
            for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
 
                                kwargs):
 
                k = RepositoryField.un_prefix_key(field)
 
                ex_field = RepositoryField.get_by_key_name(key=k, repo=cur_repo)
 
                if ex_field:
 
                    ex_field.field_value = kwargs[field]
 
                    self.sa.add(ex_field)
 
            self.sa.add(cur_repo)
 

	
 
            if org_repo_name != new_name:
 
                # rename repository
 
                self._rename_filesystem_repo(old=org_repo_name, new=new_name)
 

	
 
            return cur_repo
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def _create_repo(self, repo_name, repo_type, description, owner,
 
                     private=False, clone_uri=None, repo_group=None,
 
                     landing_rev='rev:tip', fork_of=None,
 
                     copy_fork_permissions=False, enable_statistics=False,
 
                     enable_locking=False, enable_downloads=False,
 
                     copy_group_permissions=False, state=Repository.STATE_PENDING):
 
        """
 
        Create repository inside database with PENDING state. This should only be

        executed from create(), with the exception of importing existing repos.
 

	
 
        """
 
        from kallithea.model.scm import ScmModel
 

	
 
        owner = self._get_user(owner)
 
        fork_of = self._get_repo(fork_of)
 
        repo_group = self._get_repo_group(repo_group)
 
        try:
 
            repo_name = safe_unicode(repo_name)
 
            description = safe_unicode(description)
 
            # repo_name is just the name of the repository,

            # while repo_name_full is the fully qualified name combining

            # the name with the path of its group
 
            repo_name_full = repo_name
 
            repo_name = repo_name.split(self.URL_SEPARATOR)[-1]
 

	
 
            new_repo = Repository()
 
            new_repo.repo_state = state
 
            new_repo.enable_statistics = False
 
            new_repo.repo_name = repo_name_full
 
            new_repo.repo_type = repo_type
 
            new_repo.user = owner
 
            new_repo.group = repo_group
 
            new_repo.description = description or repo_name
 
            new_repo.private = private
 
            new_repo.clone_uri = clone_uri
 
            new_repo.landing_rev = landing_rev
 

	
 
            new_repo.enable_statistics = enable_statistics
 
            new_repo.enable_locking = enable_locking
 
            new_repo.enable_downloads = enable_downloads
 

	
 
            if repo_group:
 
                new_repo.enable_locking = repo_group.enable_locking
 

	
 
            if fork_of:
 
                parent_repo = fork_of
 
                new_repo.fork = parent_repo
 

	
 
            self.sa.add(new_repo)
 

	
 
            if fork_of and copy_fork_permissions:
 
                repo = fork_of
 
                user_perms = UserRepoToPerm.query() \
 
                    .filter(UserRepoToPerm.repository == repo).all()
 
                group_perms = UserGroupRepoToPerm.query() \
 
                    .filter(UserGroupRepoToPerm.repository == repo).all()
 

	
 
                for perm in user_perms:
 
                    UserRepoToPerm.create(perm.user, new_repo, perm.permission)
 

	
 
                for perm in group_perms:
 
                    UserGroupRepoToPerm.create(perm.users_group, new_repo,
 
                                               perm.permission)
 

	
 
            elif repo_group and copy_group_permissions:
 

	
 
                user_perms = UserRepoGroupToPerm.query() \
 
                    .filter(UserRepoGroupToPerm.group == repo_group).all()
 

	
 
                group_perms = UserGroupRepoGroupToPerm.query() \
 
                    .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
 

	
 
                for perm in user_perms:
 
                    perm_name = perm.permission.permission_name.replace('group.', 'repository.')
 
                    perm_obj = Permission.get_by_key(perm_name)
 
                    UserRepoToPerm.create(perm.user, new_repo, perm_obj)
 

	
 
                for perm in group_perms:
 
                    perm_name = perm.permission.permission_name.replace('group.', 'repository.')
 
                    perm_obj = Permission.get_by_key(perm_name)
 
                    UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
 

	
 
            else:
 
                perm_obj = self._create_default_perms(new_repo, private)
 
                self.sa.add(perm_obj)
 

	
 
            # now automatically start following this repository as owner
 
            ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
 
                                                    owner.user_id)
 
            # we need to flush here, in order to check if database won't
 
            # throw any exceptions, create filesystem dirs at the very end
 
            self.sa.flush()
 
            return new_repo
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def create(self, form_data, cur_user):
 
        """
 
        Create repository using celery tasks
 

	
 
        :param form_data:
 
        :param cur_user:
 
        """
 
        from kallithea.lib.celerylib import tasks, run_task
 
        return run_task(tasks.create_repo, form_data, cur_user)
 

	
 
    def _update_permissions(self, repo, perms_new=None, perms_updates=None,
 
                            check_perms=True):
 
        if not perms_new:
 
            perms_new = []
 
        if not perms_updates:
 
            perms_updates = []
 

	
 
        # update permissions
 
        for member, perm, member_type in perms_updates:
 
            if member_type == 'user':
 
                # this updates existing one
 
                self.grant_user_permission(
 
                    repo=repo, user=member, perm=perm
 
                )
 
            else:
 
                #check if we have permissions to alter this usergroup
 
                req_perms = (
 
                    'usergroup.read', 'usergroup.write', 'usergroup.admin')
 
                if not check_perms or HasUserGroupPermissionAny(*req_perms)(
 
                        member):
 
                    self.grant_user_group_permission(
 
                        repo=repo, group_name=member, perm=perm
 
                    )
 
            # set new permissions
 
        for member, perm, member_type in perms_new:
 
            if member_type == 'user':
 
                self.grant_user_permission(
 
                    repo=repo, user=member, perm=perm
 
                )
 
            else:
 
                #check if we have permissions to alter this usergroup
 
                req_perms = (
 
                    'usergroup.read', 'usergroup.write', 'usergroup.admin')
 
                if not check_perms or HasUserGroupPermissionAny(*req_perms)(
 
                        member):
 
                    self.grant_user_group_permission(
 
                        repo=repo, group_name=member, perm=perm
 
                    )
 

	
 
    def create_fork(self, form_data, cur_user):
 
        """
 
        Simple wrapper into executing celery task for fork creation
 

	
 
        :param form_data:
 
        :param cur_user:
 
        """
 
        from kallithea.lib.celerylib import tasks, run_task
 
        return run_task(tasks.create_repo_fork, form_data, cur_user)
 

	
 
    def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
 
        """
 
        Delete the given repository; the forks parameter defines what to do with

        attached forks. Raises AttachedForksError if the deleted repo has attached
 
        forks
 

	
 
        :param repo:
 
        :param forks: str 'delete' or 'detach'
 
        :param fs_remove: remove(archive) repo from filesystem
 
        """
 
        if not cur_user:
 
            cur_user = getattr(get_current_authuser(), 'username', None)
 
        repo = self._get_repo(repo)
 
        if repo is not None:
 
            if forks == 'detach':
 
                for r in repo.forks:
 
                    r.fork = None
 
                    self.sa.add(r)
 
            elif forks == 'delete':
 
                for r in repo.forks:
 
                    self.delete(r, forks='delete')
 
            elif [f for f in repo.forks]:
 
                raise AttachedForksError()
 

	
 
            old_repo_dict = repo.get_dict()
 
            try:
 
                self.sa.delete(repo)
 
                if fs_remove:
 
                    self._delete_filesystem_repo(repo)
 
                else:
 
                    log.debug('skipping removal from filesystem')
 
                log_delete_repository(old_repo_dict,
 
                                      deleted_by=cur_user)
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                raise
 

	
 
    def grant_user_permission(self, repo, user, perm):
 
        """
 
        Grant permission for user on given repository, or update existing one
 
        if found
 

	
 
        :param repo: Instance of Repository, repository_id, or repository name
 
        :param user: Instance of User, user_id or username
 
        :param perm: Instance of Permission, or permission_name
 
        """
 
        user = self._get_user(user)
 
        repo = self._get_repo(repo)
 
        permission = self._get_perm(perm)
 

	
 
        # check if we have that permission already
 
        obj = self.sa.query(UserRepoToPerm) \
 
            .filter(UserRepoToPerm.user == user) \
 
            .filter(UserRepoToPerm.repository == repo) \
 
            .scalar()
 
        if obj is None:
 
            # create new !
 
            obj = UserRepoToPerm()
 
        obj.repository = repo
 
        obj.user = user
 
        obj.permission = permission
 
        self.sa.add(obj)
 
        log.debug('Granted perm %s to %s on %s', perm, user, repo)
 
        return obj
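
    # Usage sketch (hypothetical names; the method accepts objects, ids or
    # names for repo, user and perm alike):
    #
    #     RepoModel().grant_user_permission(repo='myrepo', user='jdoe',
    #                                       perm='repository.write')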
 

	
 
    def revoke_user_permission(self, repo, user):
 
        """
 
        Revoke permission for user on given repository
 

	
 
        :param repo: Instance of Repository, repository_id, or repository name
 
        :param user: Instance of User, user_id or username
 
        """
 

	
 
        user = self._get_user(user)
 
        repo = self._get_repo(repo)
 

	
 
        obj = self.sa.query(UserRepoToPerm) \
 
            .filter(UserRepoToPerm.repository == repo) \
 
            .filter(UserRepoToPerm.user == user) \
 
            .scalar()
 
        if obj is not None:
 
            self.sa.delete(obj)
 
            log.debug('Revoked perm for user %s on %s', user, repo)
 

	
 
    def grant_user_group_permission(self, repo, group_name, perm):
 
        """
 
        Grant permission for user group on given repository, or update
 
        existing one if found
 

	
 
        :param repo: Instance of Repository, repository_id, or repository name
 
        :param group_name: Instance of UserGroup, users_group_id,
 
            or user group name
 
        :param perm: Instance of Permission, or permission_name
 
        """
 
        repo = self._get_repo(repo)
 
        group_name = self._get_user_group(group_name)
 
        permission = self._get_perm(perm)
 

	
 
        # check if we have that permission already
 
        obj = self.sa.query(UserGroupRepoToPerm) \
 
            .filter(UserGroupRepoToPerm.users_group == group_name) \
 
            .filter(UserGroupRepoToPerm.repository == repo) \
 
            .scalar()
 

	
 
        if obj is None:
 
            # create new
 
            obj = UserGroupRepoToPerm()
 

	
 
        obj.repository = repo
 
        obj.users_group = group_name
 
        obj.permission = permission
 
        self.sa.add(obj)
 
        log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
 
        return obj
 

	
 
    def revoke_user_group_permission(self, repo, group_name):
 
        """
 
        Revoke permission for user group on given repository
 

	
 
        :param repo: Instance of Repository, repository_id, or repository name
 
        :param group_name: Instance of UserGroup, users_group_id,
 
            or user group name
 
        """
 
        repo = self._get_repo(repo)
 
        group_name = self._get_user_group(group_name)
 

	
 
        obj = self.sa.query(UserGroupRepoToPerm) \
 
            .filter(UserGroupRepoToPerm.repository == repo) \
 
            .filter(UserGroupRepoToPerm.users_group == group_name) \
 
            .scalar()
 
        if obj is not None:
 
            self.sa.delete(obj)
 
            log.debug('Revoked perm for user group %s on %s', group_name, repo)
 

	
 
    def delete_stats(self, repo_name):
 
        """
 
        removes stats for given repo
 

	
 
        :param repo_name:
 
        """
 
        repo = self._get_repo(repo_name)
 
        try:
 
            obj = self.sa.query(Statistics) \
 
                .filter(Statistics.repository == repo).scalar()
 
            if obj is not None:
 
                self.sa.delete(obj)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
 
                                clone_uri=None, repo_store_location=None):
 
        """
 
        Makes the repository on the filesystem. The operation is group aware, meaning that it will create

        a repository within a group, adjusting the paths according to the group location.
 

	
 
        :param repo_name:
 
        :param alias:
 
        :param parent:
 
        :param clone_uri:
 
        :param repo_store_location:
 
        """
 
        from kallithea.lib.utils import is_valid_repo, is_valid_repo_group
 
        from kallithea.model.scm import ScmModel
 

	
 
        if '/' in repo_name:
 
            raise ValueError('repo_name must not contain groups, got `%s`' % repo_name)
 

	
 
        if isinstance(repo_group, RepoGroup):
 
            new_parent_path = os.sep.join(repo_group.full_path_splitted)
 
        else:
 
            new_parent_path = repo_group or ''
 

	
 
        if repo_store_location:
 
            _paths = [repo_store_location]
 
        else:
 
            _paths = [self.repos_path, new_parent_path, repo_name]
 
            # we need to make it str for mercurial
 
        repo_path = os.path.join(*map(safe_str, _paths))
 

	
 
        # check if this path is not a repository
 
        if is_valid_repo(repo_path, self.repos_path):
 
            raise Exception('This path %s is already a valid repository' % repo_path)
 

	
 
        # check if this path is a group
 
        if is_valid_repo_group(repo_path, self.repos_path):
 
            raise Exception('This path %s is already a valid repository group' % repo_path)
 

	
 
        log.info('creating repo %s in %s from url: `%s`',
 
            repo_name, safe_unicode(repo_path),
 
            obfuscate_url_pw(clone_uri))
 

	
 
        backend = get_backend(repo_type)
 

	
 
        if repo_type == 'hg':
 
            baseui = make_ui('db', clear_session=False)
 
            # patch and reset hooks section of UI config to not run any
 
            # hooks on creating remote repo
 
            for k, v in baseui.configitems('hooks'):
 
                baseui.setconfig('hooks', k, None)
 

	
 
            repo = backend(repo_path, create=True, src_url=clone_uri, baseui=baseui)
 
        elif repo_type == 'git':
 
            repo = backend(repo_path, create=True, src_url=clone_uri, bare=True)
 
            # add kallithea hook into this repo
 
            ScmModel().install_git_hooks(repo=repo)
 
        else:
 
            raise Exception('Unsupported repo_type %s, expected hg or git' % repo_type)
 

	
 
        log.debug('Created repo %s with %s backend',
 
                  safe_unicode(repo_name), safe_unicode(repo_type))
 
        return repo
 

	
 
    def _rename_filesystem_repo(self, old, new):
 
        """
 
        renames repository on filesystem
 

	
 
        :param old: old name
 
        :param new: new name
 
        """
 
        log.info('renaming repo from %s to %s', old, new)
 

	
 
        old_path = os.path.join(self.repos_path, old)
 
        new_path = os.path.join(self.repos_path, new)
 
        if os.path.isdir(new_path):
 
            raise Exception(
 
                'Was trying to rename to already existing dir %s' % new_path
 
            )
 
        shutil.move(old_path, new_path)
 

	
 
    def _delete_filesystem_repo(self, repo):
 
        """
 
        removes repo from filesystem; the removal is actually done by

        renaming the dir with an 'rm__*' prefix, which Kallithea will skip.
 
        It can be undeleted later by reverting the rename.
 

	
 
        :param repo: repo object
 
        """
 
        rm_path = os.path.join(self.repos_path, repo.repo_name)
 
        log.info("Removing %s", rm_path)
 

	
 
        _now = datetime.now()
 
        _ms = str(_now.microsecond).rjust(6, '0')
 
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
 
                             repo.just_name)
 
        if repo.group:
 
            args = repo.group.full_path_splitted + [_d]
 
            _d = os.path.join(*args)
 
        shutil.move(rm_path, os.path.join(self.repos_path, _d))
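
# The rename above turns e.g. `myrepo` into something like
# `rm__20140704_101530_000042__myrepo`; such `rm__*` directories are skipped
# by Kallithea's repository scan and the repo can be undeleted by renaming
# the directory back.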
kallithea/model/scm.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.scm
 
~~~~~~~~~~~~~~~~~~~
 

	
 
Scm model for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 9, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 
import os
 
import re
 
import time
 
import traceback
 
import logging
 
import cStringIO
 
import pkg_resources
 
from os.path import join as jn
 

	
 
from sqlalchemy import func
 
from pylons.i18n.translation import _
 

	
 
import kallithea
 
from kallithea.lib.vcs import get_backend
 
from kallithea.lib.vcs.exceptions import RepositoryError
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 

	
 
from kallithea import BACKENDS
 
from kallithea.lib import helpers as h
 
from kallithea.lib.utils2 import safe_str, safe_unicode, get_server_url,\
 
    _set_extras
 
from kallithea.lib.auth import HasRepoPermissionAny, HasRepoGroupPermissionAny,\
 
    HasUserGroupPermissionAny, HasPermissionAny, HasPermissionAll
 
from kallithea.lib.utils import get_filesystem_repos, make_ui, \
 
    action_logger
 
from kallithea.model import BaseModel
 
from kallithea.model.db import Repository, Ui, CacheInvalidation, \
 
    UserFollowing, UserLog, User, RepoGroup, PullRequest
 
from kallithea.lib.hooks import log_push_action
 
from kallithea.lib.exceptions import NonRelativePathError, IMCCommitError
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class UserTemp(object):
 
    def __init__(self, user_id):
 
        self.user_id = user_id
 

	
 
    def __repr__(self):
 
        return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
 

	
 

	
 
class RepoTemp(object):
 
    def __init__(self, repo_id):
 
        self.repo_id = repo_id
 

	
 
    def __repr__(self):
 
        return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
 

	
 

	
 
class CachedRepoList(object):
 
    """
 
    Cached repo list. Iterates over repositories using cached scm instances.
 
    """
 

	
 
    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
 
        self.db_repo_list = db_repo_list
 
        self.repos_path = repos_path
 
        self.order_by = order_by
 
        self.reversed = (order_by or '').startswith('-')
 
        if not perm_set:
 
            perm_set = ['repository.read', 'repository.write',
 
                        'repository.admin']
 
        self.perm_set = perm_set
 

	
 
    def __len__(self):
 
        return len(self.db_repo_list)
 

	
 
    def __repr__(self):
 
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
 

	
 
    def __iter__(self):
 
        # pre-populated valid_cache_keys to avoid executing select statements
 
        # for each repo
 
        valid_cache_keys = CacheInvalidation.get_valid_cache_keys()
 

	
 
        for dbr in self.db_repo_list:
 
            scmr = dbr.scm_instance_cached(valid_cache_keys)
 
            # check permission at this level
 
            if not HasRepoPermissionAny(
 
                *self.perm_set)(dbr.repo_name, 'get repo check'):
 
                continue
 

	
 
            try:
 
                last_change = scmr.last_change
 
                tip = h.get_changeset_safe(scmr, 'tip')
 
            except Exception:
 
                log.error(
 
                    'Repository %s is present in the database but '

                    'cannot be created as an scm instance, org_exc:%s'
 
                    % (dbr.repo_name, traceback.format_exc())
 
                )
 
                continue
 

	
 
            tmp_d = {}
 
            tmp_d['name'] = dbr.repo_name
 
            tmp_d['name_sort'] = tmp_d['name'].lower()
 
            tmp_d['raw_name'] = tmp_d['name'].lower()
 
            tmp_d['description'] = dbr.description
 
            tmp_d['description_sort'] = tmp_d['description'].lower()
 
            tmp_d['last_change'] = last_change
 
            tmp_d['last_change_sort'] = time.mktime(last_change.timetuple())
 
            tmp_d['tip'] = tip.raw_id
 
            tmp_d['tip_sort'] = tip.revision
 
            tmp_d['rev'] = tip.revision
 
            tmp_d['contact'] = dbr.user.full_contact
 
            tmp_d['contact_sort'] = tmp_d['contact']
 
            tmp_d['owner_sort'] = tmp_d['contact']
 
            tmp_d['repo_archives'] = list(scmr._get_archives())
 
            tmp_d['last_msg'] = tip.message
 
            tmp_d['author'] = tip.author
 
            tmp_d['dbrepo'] = dbr.get_dict()
 
            tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
 
            yield tmp_d
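
A minimal usage sketch of the iterator above (assuming this module is kallithea.model.scm and an initialized Kallithea application context; the sort key is illustrative):

    from kallithea.model.db import Repository
    from kallithea.model.scm import CachedRepoList, ScmModel

    repos = Repository.query().all()
    cached = CachedRepoList(repos, repos_path=ScmModel().repos_path,
                            order_by='name')
    for d in cached:  # one dict per repository visible to the current user
        print('%s %s %s' % (d['name'], d['rev'], d['last_change']))

Note that iteration is where the work happens: permission filtering and scm instantiation are deferred until the list is actually consumed.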
 

	
 

	
 
class SimpleCachedRepoList(CachedRepoList):
 
    """
 
    Lighter version of CachedRepoList without the scm initialisation
 
    """
 

	
 
    def __iter__(self):
 
        for dbr in self.db_repo_list:
 
            # check permission at this level
 
            if not HasRepoPermissionAny(
 
                *self.perm_set)(dbr.repo_name, 'get repo check'):
 
                continue
 

	
 
            tmp_d = {
 
                'name': dbr.repo_name,
 
                'dbrepo': dbr.get_dict(),
 
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
 
            }
 
            yield tmp_d
 

	
 

	
 
class _PermCheckIterator(object):
 
    def __init__(self, obj_list, obj_attr, perm_set, perm_checker, extra_kwargs=None):
 
        """
 
        Creates an iterator over the given list of objects, additionally
 
        checking the permissions listed in perm_set for each of them
 

	
 
        :param obj_list: list of db objects
 
        :param obj_attr: attribute of object to pass into perm_checker
 
        :param perm_set: list of permissions to check
 
        :param perm_checker: callable to check permissions against
 
        """
 
        self.obj_list = obj_list
 
        self.obj_attr = obj_attr
 
        self.perm_set = perm_set
 
        self.perm_checker = perm_checker
 
        self.extra_kwargs = extra_kwargs or {}
 

	
 
    def __len__(self):
 
        return len(self.obj_list)
 

	
 
    def __repr__(self):
 
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
 

	
 
    def __iter__(self):
 
        for db_obj in self.obj_list:
 
            # check permission at this level
 
            name = getattr(db_obj, self.obj_attr, None)
 
            if not self.perm_checker(*self.perm_set)(
 
                    name, self.__class__.__name__, **self.extra_kwargs):
 
                continue
 

	
 
            yield db_obj
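
A sketch of how this iterator is parameterized (the permission names and checker are Kallithea's own; the object list is illustrative). The RepoList, RepoGroupList and UserGroupList subclasses below merely preset these arguments:

    from kallithea.model.db import Repository
    from kallithea.lib.auth import HasRepoPermissionAny

    it = _PermCheckIterator(obj_list=Repository.query().all(),
                            obj_attr='repo_name',
                            perm_set=['repository.read'],
                            perm_checker=HasRepoPermissionAny)
    readable = list(it)  # objects failing the permission check are skipped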
 

	
 

	
 
class RepoList(_PermCheckIterator):
 

	
 
    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
 
        if not perm_set:
 
            perm_set = ['repository.read', 'repository.write', 'repository.admin']
 

	
 
        super(RepoList, self).__init__(obj_list=db_repo_list,
 
                    obj_attr='repo_name', perm_set=perm_set,
 
                    perm_checker=HasRepoPermissionAny,
 
                    extra_kwargs=extra_kwargs)
 

	
 

	
 
class RepoGroupList(_PermCheckIterator):
 

	
 
    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
 
        if not perm_set:
 
            perm_set = ['group.read', 'group.write', 'group.admin']
 

	
 
        super(RepoGroupList, self).__init__(obj_list=db_repo_group_list,
 
                    obj_attr='group_name', perm_set=perm_set,
 
                    perm_checker=HasRepoGroupPermissionAny,
 
                    extra_kwargs=extra_kwargs)
 

	
 

	
 
class UserGroupList(_PermCheckIterator):
 

	
 
    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
 
        if not perm_set:
 
            perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
 

	
 
        super(UserGroupList, self).__init__(obj_list=db_user_group_list,
 
                    obj_attr='users_group_name', perm_set=perm_set,
 
                    perm_checker=HasUserGroupPermissionAny,
 
                    extra_kwargs=extra_kwargs)
 

	
 

	
 
class ScmModel(BaseModel):
 
    """
 
    Generic Scm Model
 
    """
 

	
 
    def __get_repo(self, instance):
 
        cls = Repository
 
        if isinstance(instance, cls):
 
            return instance
 
        elif isinstance(instance, int) or safe_str(instance).isdigit():
 
            return cls.get(instance)
 
        elif isinstance(instance, basestring):
 
            return cls.get_by_repo_name(instance)
 
        elif instance is not None:
 
            raise Exception('given object must be int, basestring or instance'
 
                            ' of %s, got %s' % (cls, type(instance)))
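
For illustration, the lookups this helper performs for each accepted argument (called as self.__get_repo(...) from inside the class; the values are hypothetical):

    # self.__get_repo(repo_object)     -> returned unchanged
    # self.__get_repo(42)              -> Repository.get(42)
    # self.__get_repo('42')            -> digit strings are treated as ids
    # self.__get_repo(u'grp/my-repo')  -> Repository.get_by_repo_name(...)
    # self.__get_repo(None)            -> falls through and returns None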
 

	
 
    @LazyProperty
 
    def repos_path(self):
 
        """
 
        Gets the repositories root path from database
 
        """
 

	
 
        q = self.sa.query(Ui).filter(Ui.ui_key == '/').one()
 

	
 
        return q.ui_value
 

	
 
    def repo_scan(self, repos_path=None):
 
        """
 
        List the repositories found in the given path. The path itself should
 
        not be a repository. Returns a dictionary of repository objects.
 

	
 
        :param repos_path: path to directory containing repositories
 
        """
 

	
 
        if repos_path is None:
 
            repos_path = self.repos_path
 

	
 
        log.info('scanning for repositories in %s', repos_path)
 

	
 
        baseui = make_ui('db')
 
        repos = {}
 

	
 
        for name, path in get_filesystem_repos(repos_path, recursive=True):
 
            # the name needs to be decomposed and put back together using '/',
 
            # since that is the internal storage separator for kallithea
 
            name = Repository.normalize_repo_name(name)
 

	
 
            try:
 
                if name in repos:
 
                    raise RepositoryError('Duplicate repository name %s '
 
                                          'found in %s' % (name, path))
 
                else:
 

	
 
                    klass = get_backend(path[0])
 

	
 
                    if path[0] == 'hg' and path[0] in BACKENDS:
 
                        repos[name] = klass(safe_str(path[1]), baseui=baseui)
 

	
 
                    if path[0] == 'git' and path[0] in BACKENDS:
 
                        repos[name] = klass(path[1])
 
            except OSError:
 
                continue
 
        log.debug('found %s repositories', len(repos))
 
        return repos
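
A usage sketch (the path is hypothetical; when omitted, the configured repos_path is scanned):

    model = ScmModel()
    found = model.repo_scan('/srv/repos')  # hypothetical location
    for name, backend in found.items():
        print('%s -> %s' % (name, backend.alias))  # e.g. 'myrepo -> hg'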
 

	
 
    def get_repos(self, all_repos=None, sort_key=None, simple=False):
 
        """
 
        Get all repos from the database and for each repo create its
 
        backend instance, filling that backend with information from the database
 

	
 
        :param all_repos: list of repository names as strings
 
            give specific repositories list, good for filtering
 

	
 
        :param sort_key: initial sorting of repos
 
        :param simple: use SimpleCachedRepoList - one without the SCM info
 
        """
 
        if all_repos is None:
 
            all_repos = self.sa.query(Repository)\
 
                        .filter(Repository.group_id == None)\
 
                        .order_by(func.lower(Repository.repo_name)).all()
 
        if simple:
 
            repo_iter = SimpleCachedRepoList(all_repos,
 
                                             repos_path=self.repos_path,
 
                                             order_by=sort_key)
 
        else:
 
            repo_iter = CachedRepoList(all_repos,
 
                                       repos_path=self.repos_path,
 
                                       order_by=sort_key)
 

	
 
        return repo_iter
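
A sketch of the two modes (assuming an initialized application context):

    model = ScmModel()
    heavy = model.get_repos(sort_key='name')  # full SCM data: tip, archives, ...
    light = model.get_repos(simple=True)      # database fields only
    for d in light:
        print(d['name'])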
 

	
 
    def get_repo_groups(self, all_groups=None):
 
        if all_groups is None:
 
            all_groups = RepoGroup.query()\
 
                .filter(RepoGroup.group_parent_id == None).all()
 
        return list(RepoGroupList(all_groups))
 

	
 
    def mark_for_invalidation(self, repo_name, delete=False):
 
        """
 
        Mark caches of this repo invalid in the database.
 

	
 
        :param repo_name: the repo for which caches should be marked invalid
 
        """
 
        CacheInvalidation.set_invalidate(repo_name, delete=delete)
 
        repo = Repository.get_by_repo_name(repo_name)
 
        if repo is not None:
 
            repo.update_changeset_cache()
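
For example, after modifying a repository out of band (the repo name is hypothetical):

    ScmModel().mark_for_invalidation('group/myrepo')
    # the next scm_instance_cached() call will re-read the repository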
 

	
 
    def toggle_following_repo(self, follow_repo_id, user_id):
 

	
 
        f = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        if f is not None:
 
            try:
 
                self.sa.delete(f)
 
                action_logger(UserTemp(user_id),
 
                              'stopped_following_repo',
 
                              RepoTemp(follow_repo_id))
 
                return
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                raise
 

	
 
        try:
 
            f = UserFollowing()
 
            f.user_id = user_id
 
            f.follows_repo_id = follow_repo_id
 
            self.sa.add(f)
 

	
 
            action_logger(UserTemp(user_id),
 
                          'started_following_repo',
 
                          RepoTemp(follow_repo_id))
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def toggle_following_user(self, follow_user_id, user_id):
 
        f = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.follows_user_id == follow_user_id)\
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        if f is not None:
 
            try:
 
                self.sa.delete(f)
 
                return
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                raise
 

	
 
        try:
 
            f = UserFollowing()
 
            f.user_id = user_id
 
            f.follows_user_id = follow_user_id
 
            self.sa.add(f)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def is_following_repo(self, repo_name, user_id, cache=False):
 
        r = self.sa.query(Repository)\
 
            .filter(Repository.repo_name == repo_name).scalar()
 

	
 
        f = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.follows_repository == r)\
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        return f is not None
 

	
 
    def is_following_user(self, username, user_id, cache=False):
 
        u = User.get_by_username(username)
 

	
 
        f = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.follows_user == u)\
 
            .filter(UserFollowing.user_id == user_id).scalar()
 

	
 
        return f is not None
 

	
 
    def get_followers(self, repo):
 
        repo = self._get_repo(repo)
 

	
 
        return self.sa.query(UserFollowing)\
 
                .filter(UserFollowing.follows_repository == repo).count()
 

	
 
    def get_forks(self, repo):
 
        repo = self._get_repo(repo)
 
        return self.sa.query(Repository)\
 
                .filter(Repository.fork == repo).count()
 

	
 
    def get_pull_requests(self, repo):
 
        repo = self._get_repo(repo)
 
        return self.sa.query(PullRequest)\
 
                .filter(PullRequest.other_repo == repo)\
 
                .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
 

	
 
    def mark_as_fork(self, repo, fork, user):
 
        repo = self.__get_repo(repo)
 
        fork = self.__get_repo(fork)
 
        if fork and repo.repo_id == fork.repo_id:
 
            raise Exception("Cannot set repository as fork of itself")
 

	
 
        if fork and repo.repo_type != fork.repo_type:
 
            raise RepositoryError("Cannot set repository as fork of repository with other type")
 

	
 
        repo.fork = fork
 
        self.sa.add(repo)
 
        return repo
 

	
 
    def _handle_rc_scm_extras(self, username, repo_name, repo_alias,
 
                              action=None):
 
        from kallithea import CONFIG
 
        from kallithea.lib.base import _get_ip_addr
 
        try:
 
            from pylons import request
 
            environ = request.environ
 
        except TypeError:
 
            # we might be called outside of a request context, so fake
 
            # the environ data
 
            from webob import Request
 
            environ = Request.blank('').environ
 
        extras = {
 
            'ip': _get_ip_addr(environ),
 
            'username': username,
 
            'action': action or 'push_local',
 
            'repository': repo_name,
 
            'scm': repo_alias,
 
            'config': CONFIG['__file__'],
 
            'server_url': get_server_url(environ),
 
            'make_lock': None,
 
            'locked_by': [None, None]
 
        }
 
        _set_extras(extras)
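
The pylons/webob fallback above matters when this code runs outside a web request, for example from a CLI script. A standalone sketch of just that fallback:

    from webob import Request

    environ = Request.blank('').environ  # minimal synthetic WSGI environ
    print(environ['REQUEST_METHOD'])     # 'GET'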
 

	
 
    def _handle_push(self, repo, username, action, repo_name, revisions):
 
        """
 
        Triggers push action hooks
 

	
 
        :param repo: SCM repo
 
        :param username: username who pushes
 
        :param action: push/push_local/push_remote
 
        :param repo_name: name of repo
 
        :param revisions: list of revisions that we pushed
 
        """
 
        self._handle_rc_scm_extras(username, repo_name, repo_alias=repo.alias)
 
        _scm_repo = repo._repo
 
        # trigger push hook
 
        if repo.alias == 'hg':
 
            log_push_action(_scm_repo.ui, _scm_repo, node=revisions[0])
 
        elif repo.alias == 'git':
 
            log_push_action(None, _scm_repo, _git_revs=revisions)
 

	
 
    def _get_IMC_module(self, scm_type):
 
        """
 
        Returns InMemoryCommit class based on scm_type
 

	
 
        :param scm_type:
 
        """
 
        if scm_type == 'hg':
 
            from kallithea.lib.vcs.backends.hg import MercurialInMemoryChangeset
 
            return MercurialInMemoryChangeset
 

	
 
        if scm_type == 'git':
 
            from kallithea.lib.vcs.backends.git import GitInMemoryChangeset
 
            return GitInMemoryChangeset
 

	
 
        raise Exception('Invalid scm_type, must be one of hg, git; got %s'
 
                        % (scm_type,))
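
A dispatch sketch (scm_repo stands for any backend repository instance):

    imc_class = ScmModel()._get_IMC_module('hg')
    # -> kallithea.lib.vcs.backends.hg.MercurialInMemoryChangeset
    imc = imc_class(scm_repo)  # ready for add/change/remove and commit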
 

	
 
    def pull_changes(self, repo, username):
 
        """
 
        Pull from "clone URL".
 
        """
 
        dbrepo = self.__get_repo(repo)
 
        clone_uri = dbrepo.clone_uri
 
        if not clone_uri:
 
            raise Exception("This repository doesn't have a clone uri")
 

	
 
        repo = dbrepo.scm_instance
 
        repo_name = dbrepo.repo_name
 
        try:
 
            if repo.alias == 'git':
 
                repo.fetch(clone_uri)
 
                # git doesn't really have anything like a post-fetch action,
 
                # so we fake one here
 
                # TODO: extract the fetched revisions somehow
 
                self._handle_push(repo,
 
                                  username=username,
 
                                  action='push_remote',
 
                                  repo_name=repo_name,
 
                                  revisions=[])
 
            else:
 
                self._handle_rc_scm_extras(username, dbrepo.repo_name,
 
                                           repo.alias, action='push_remote')
 
                repo.pull(clone_uri)
 

	
 
            self.mark_for_invalidation(repo_name)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
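
A usage sketch (the repository is resolved through __get_repo, so a name, an id or a Repository object all work; the name here is hypothetical):

    ScmModel().pull_changes('group/myrepo', username=u'admin')
    # raises if the repository has no clone_uri configured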
 

	
 
    def commit_change(self, repo, repo_name, cs, user, author, message,
 
                      content, f_path):
 
        """
 
        Commit a change to a single file
 

	
 
        :param repo: a db_repo.scm_instance
 
        """
 
        user = self._get_user(user)
 
        IMC = self._get_IMC_module(repo.alias)
 

	
 
        # encoding here ensures we end up with properly encoded values;
 
        # in any other case this will raise an exception and deny the commit
 
        content = safe_str(content)
 
        path = safe_str(f_path)
 
        # message and author need to be unicode;
 
        # the backend will then translate them into the required type
 
        message = safe_unicode(message)
 
        author = safe_unicode(author)
 
        imc = IMC(repo)
 
        imc.change(FileNode(path, content, mode=cs.get_file_mode(f_path)))
 
        try:
 
            tip = imc.commit(message=message, author=author,
 
                             parents=[cs], branch=cs.branch)
 
        except Exception as e:
 
            log.error(traceback.format_exc())
 
            raise IMCCommitError(str(e))
 
        finally:
 
            # always clear caches; even if the commit fails we want a fresh object
 
            self.mark_for_invalidation(repo_name)
 
        self._handle_push(repo,
 
                          username=user.username,
 
                          action='push_local',
 
                          repo_name=repo_name,
 
                          revisions=[tip.raw_id])
 
        return tip
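
A sketch of committing a one-file edit (db_repo is a Repository row, the file is assumed to exist in the parent changeset, and the user and author values are illustrative):

    scm = db_repo.scm_instance
    cs = scm.get_changeset()  # current tip
    tip = ScmModel().commit_change(
        repo=scm, repo_name=db_repo.repo_name, cs=cs,
        user=user_id,  # User object or id
        author=u'Jane Doe <jane@example.com>',
        message=u'Fix typo in README',
        content='corrected text\n',
        f_path='README.rst')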
 

	
 
    def _sanitize_path(self, f_path):
 
        if f_path.startswith('/') or f_path.startswith('.') or '../' in f_path:
 
            raise NonRelativePathError('%s is not a relative path' % f_path)
 
        if f_path:
 
            f_path = os.path.normpath(f_path)
 
        return f_path
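
Illustrative inputs and outcomes:

    model = ScmModel()
    model._sanitize_path('docs/index.rst')  # ok -> normalized relative path
    model._sanitize_path('/etc/passwd')     # raises NonRelativePathError
    model._sanitize_path('a/../../etc')     # raises NonRelativePathError
    model._sanitize_path('.hidden')         # leading '.' is rejected too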
 

	
 
    def get_nodes(self, repo_name, revision, root_path='/', flat=True):
 
        """
 
        Recursively walk the root dir and return all directory and file paths found.
 

	
 
        :param repo_name: name of repository
 
        :param revision: revision for which to list nodes
 
        :param root_path: root path to list
 
        :param flat: if True return plain path strings, if False return dicts with name and type
 

	
 
        """
 
        _files = []
 
        _dirs = []
 
        try:
 
            _repo = self.__get_repo(repo_name)
 
            changeset = _repo.scm_instance.get_changeset(revision)
 
            root_path = root_path.lstrip('/')
 
            for topnode, dirs, files in changeset.walk(root_path):
 
                for f in files:
 
                    _files.append(f.path if flat else {"name": f.path,
 
                                                       "type": "file"})
 
                for d in dirs:
 
                    _dirs.append(d.path if flat else {"name": d.path,
 
                                                      "type": "dir"})
 
        except RepositoryError:
 
            log.debug(traceback.format_exc())
 
            raise
 

	
 
        return _dirs, _files
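
A usage sketch (repository name and revision are hypothetical):

    dirs, files = ScmModel().get_nodes('myrepo', 'tip')  # flat path strings
    dirs, files = ScmModel().get_nodes('myrepo', 'tip', flat=False)
    # flat=False yields dicts such as {'name': 'docs/index.rst', 'type': 'file'}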
 

	
 
    def create_nodes(self, user, repo, message, nodes, parent_cs=None,
 
                     author=None, trigger_push_hook=True):
 
        """
 
        Commits specified nodes to repo.
 

	
 
        :param user: Kallithea User object or user_id, the committer
 
        :param repo: Kallithea Repository object
 
        :param message: commit message
 
        :param nodes: mapping {filename:{'content':content},...}
 
        :param parent_cs: parent changeset; can be empty, in which case this is the initial commit
 
        :param author: author of the commit; can differ from the committer (git only)
 
        :param trigger_push_hook: trigger push hooks
 

	
 
        :returns: new committed changeset
 
        """
 

	
 
        user = self._get_user(user)
 
        scm_instance = repo.scm_instance_no_cache()
 

	
 
        processed_nodes = []
 
        for f_path in nodes:
 
            f_path = self._sanitize_path(f_path)
 
            content = nodes[f_path]['content']
 
            f_path = safe_str(f_path)
 
            # encoding here ensures we end up with properly encoded values;
 
            # in any other case this will raise an exception and deny the commit
 
            if isinstance(content, (basestring,)):
 
                content = safe_str(content)
 
            elif isinstance(content, (file, cStringIO.OutputType,)):
 
                content = content.read()
 
            else:
 
                raise Exception('Content is of unrecognized type %s' % (
 
                    type(content)
 
                ))
 
            processed_nodes.append((f_path, content))
 

	
 
        message = safe_unicode(message)
 
        committer = user.full_contact
 
        author = safe_unicode(author) if author else committer
 

	
 
        IMC = self._get_IMC_module(scm_instance.alias)
 
        imc = IMC(scm_instance)
 

	
 
        if not parent_cs:
 
            parent_cs = EmptyChangeset(alias=scm_instance.alias)
 

	
 
        if isinstance(parent_cs, EmptyChangeset):
 
            # EmptyChangeset means we're editing an empty repository
 
            parents = None
 
        else:
 
            parents = [parent_cs]
 
        # add multiple nodes
 
        for path, content in processed_nodes:
 
            imc.add(FileNode(path, content=content))
 

	
 
        tip = imc.commit(message=message,
 
                         author=author,
 
                         parents=parents,
 
                         branch=parent_cs.branch)
 

	
 
        self.mark_for_invalidation(repo.repo_name)
 
        if trigger_push_hook:
 
            self._handle_push(scm_instance,
 
                              username=user.username,
 
                              action='push_local',
 
                              repo_name=repo.repo_name,
 
                              revisions=[tip.raw_id])
 
        return tip
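
A sketch of the nodes mapping (values are illustrative; for a non-empty repository parent_cs should be the current tip, since omitting it commits against an EmptyChangeset):

    tip = ScmModel().create_nodes(
        user=user_id,   # id or User object
        repo=db_repo,   # Repository row
        message=u'Add initial docs',
        nodes={'docs/index.rst': {'content': 'Welcome\n'}},
        parent_cs=None)  # None -> initial commit on an empty repository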
 

	
 
    def update_nodes(self, user, repo, message, nodes, parent_cs=None,
 
                     author=None, trigger_push_hook=True):
 
        """
 
        Updates the specified nodes and commits them to the repo.
 
        """
 
        user = self._get_user(user)
 
        scm_instance = repo.scm_instance_no_cache()
 

	
 
        message = safe_unicode(message)
 
        committer = user.full_contact
 
        author = safe_unicode(author) if author else committer
 

	
 
        imc_class = self._get_IMC_module(scm_instance.alias)
 
        imc = imc_class(scm_instance)
 

	
 
        if not parent_cs:
 
            parent_cs = EmptyChangeset(alias=scm_instance.alias)
 

	
 
        if isinstance(parent_cs, EmptyChangeset):
 
            # EmptyChangeset means we're editing an empty repository
 
            parents = None
 
        else:
 
            parents = [parent_cs]
 

	
 
        # add multiple nodes
 
        for _filename, data in nodes.items():
 
            # new filename, can be renamed from the old one
 
            filename = self._sanitize_path(data['filename'])
 
            old_filename = self._sanitize_path(_filename)
 
            content = data['content']
 

	
 
            filenode = FileNode(old_filename, content=content)
 
            op = data['op']
 
            if op == 'add':
 
                imc.add(filenode)
 
            elif op == 'del':
 
                imc.remove(filenode)
 
            elif op == 'mod':
 
                if filename != old_filename:
 
                    # TODO: handle renames; needs vcs lib changes
 
                    imc.remove(filenode)
 
                    imc.add(FileNode(filename, content=content))
 
                else:
 
                    imc.change(filenode)
 

	
 
        # commit changes
 
        tip = imc.commit(message=message,
 
                         author=author,
 
                         parents=parents,
 
                         branch=parent_cs.branch)
 

	
 
        self.mark_for_invalidation(repo.repo_name)
 
        if trigger_push_hook:
 
            self._handle_push(scm_instance,
 
                              username=user.username,
 
                              action='push_local',
 
                              repo_name=repo.repo_name,
 
                              revisions=[tip.raw_id])
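
A sketch of the per-node op mapping handled above (filenames and content are illustrative; a 'mod' entry whose filename differs from its key is treated as remove-plus-add, per the TODO above):

    ScmModel().update_nodes(
        user=user_id, repo=db_repo, message=u'Rename and edit',
        parent_cs=db_repo.scm_instance.get_changeset(),  # current tip
        nodes={
            'old/name.txt': {'op': 'mod', 'filename': 'new/name.txt',
                             'content': 'updated\n'},  # rename + edit
            'notes.txt': {'op': 'add', 'filename': 'notes.txt',
                          'content': 'a new file\n'},
        })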
 

	
 
    def delete_nodes(self, user, repo, message, nodes, parent_cs=None,
 
                     author=None, trigger_push_hook=True):
 
        """
 
        Deletes specified nodes from repo.
 

	
 
        :param user: Kallithea User object or user_id, the committer
 
        :param repo: Kallithea Repository object
 
        :param message: commit message
 
        :param nodes: mapping {filename:{'content':content},...}
 
        :param parent_cs: parent changeset; can be empty, in which case this is the initial commit
 
        :param author: author of the commit; can differ from the committer (git only)
 
        :param trigger_push_hook: trigger push hooks
 

	
 
        :returns: new committed changeset after deletion
 
        """
 

	
 
        user = self._get_user(user)
 
        scm_instance = repo.scm_instance_no_cache()
 

	
 
        processed_nodes = []
 
        for f_path in nodes:
 
            f_path = self._sanitize_path(f_path)
 
            # content can be empty, but for compatibility this accepts the
 
            # same dict structure as create_nodes
 
            content = nodes[f_path].get('content')
 
            processed_nodes.append((f_path, content))
 

	
 
        message = safe_unicode(message)
 
        committer = user.full_contact
 
        author = safe_unicode(author) if author else committer
 

	
 
        IMC = self._get_IMC_module(scm_instance.alias)
 
        imc = IMC(scm_instance)
 

	
 
        if not parent_cs:
 
            parent_cs = EmptyChangeset(alias=scm_instance.alias)
 

	
 
        if isinstance(parent_cs, EmptyChangeset):
 
            # EmptyChangeset means we we're editing empty repository
 
            parents = None
 
        else:
 
            parents = [parent_cs]
 
        # add multiple nodes
 
        for path, content in processed_nodes:
 
            imc.remove(FileNode(path, content=content))
 

	
 
        tip = imc.commit(message=message,
 
                         author=author,
 
                         parents=parents,
 
                         branch=parent_cs.branch)
 

	
 
        self.mark_for_invalidation(repo.repo_name)
 
        if trigger_push_hook:
 
            self._handle_push(scm_instance,
 
                              username=user.username,
 
                              action='push_local',
 
                              repo_name=repo.repo_name,
 
                              revisions=[tip.raw_id])
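
A sketch (per the compatibility note above, the content value may be omitted):

    tip = ScmModel().delete_nodes(
        user=user_id, repo=db_repo, message=u'Remove obsolete file',
        nodes={'docs/old.rst': {}},
        parent_cs=db_repo.scm_instance.get_changeset())  # current tip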
