Changeset - 7691290837d2
Branch: default
Lars Kruse <devel@sumpfralle.de> - 2017-08-25 14:32:50
codingstyle: trivial whitespace fixes

Reported by flake8.
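
For context, a report of this kind can typically be reproduced by running flake8 over the source tree. The snippet below is only a hedged sketch: the exact flake8 invocation and configuration used for this changeset are not recorded here, and the selected codes are guessed from the whitespace and comment-formatting fixes visible in the diff.

    # Hypothetical reproduction of the flake8 report behind this changeset.
    # Assumes flake8 is installed; the selected codes (whitespace/blank-line
    # checks E2xx/E3xx/W2xx/W3xx and block-comment formatting E265) are an
    # assumption based on the diff, not the project's actual configuration.
    import subprocess

    subprocess.call(["flake8", "--select=E2,E3,W2,W3,E265", "kallithea/"])
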
51 files changed:
Changeset was too big and was cut off.
kallithea/bin/base.py
 
@@ -137,28 +137,28 @@ class RcConf(object):
 

	
 
    def update_config(self, new_config):
 
        """
 
        Reads the JSON config updates it's values with new_config and
 
        saves it back as JSON dump
 

	
 
        :param new_config:
 
        """
 
        config = {}
 
        try:
 
            with open(self._conf_name, 'rb') as conf:
 
                config = json.load(conf)
 
        except IOError as e:
 
            sys.stderr.write(str(e) + '\n')
 

	
 
        config.update(new_config)
 
        self.make_config(config)
 

	
 
    def load_config(self):
 
        """
 
        Loads config from file and returns loaded JSON object
 
        """
 
        try:
 
            with open(self._conf_name, 'rb') as conf:
 
                return  json.load(conf)
 
                return json.load(conf)
 
        except IOError as e:
 
            #sys.stderr.write(str(e) + '\n')
 
            pass
kallithea/bin/kallithea_api.py
 
@@ -100,26 +100,27 @@ def main(argv=None):
 

	
 
    try:
 
        margs = dict(map(lambda s: s.split(':', 1), other))
 
    except ValueError:
 
        sys.stderr.write('Error parsing arguments \n')
 
        sys.exit()
 
    if args.format == FORMAT_PRETTY:
 
        print 'Calling method %s => %s' % (method, apihost)
 

	
 
    json_resp = api_call(apikey, apihost, method, **margs)
 
    error_prefix = ''
 
    if json_resp['error']:
 
        error_prefix = 'ERROR:'
 
        json_data = json_resp['error']
 
    else:
 
        json_data = json_resp['result']
 
    if args.format == FORMAT_JSON:
 
        print json.dumps(json_data)
 
    elif args.format == FORMAT_PRETTY:
 
        print 'Server response \n%s%s' % (
 
            error_prefix, json.dumps(json_data, indent=4, sort_keys=True)
 
        )
 
    return 0
 

	
 

	
 
if __name__ == '__main__':
 
    sys.exit(main(sys.argv))
kallithea/bin/kallithea_backup.py
 
@@ -71,34 +71,35 @@ class BackupManager(object):
 
        for dir_name in os.listdir(self.repos_path):
 
            logging.info('backing up %s', dir_name)
 
            tar.add(os.path.join(self.repos_path, dir_name), dir_name)
 
        tar.close()
 
        logging.info('finished backup of mercurial repositories')
 

	
 
    def transfer_files(self):
 
        params = {
 
                  'id_rsa_key': self.id_rsa_path,
 
                  'backup_file': os.path.join(self.backup_file_path,
 
                                             self.backup_file_name),
 
                  'backup_server': self.backup_server
 
                  }
 
        cmd = ['scp', '-l', '40000', '-i', '%(id_rsa_key)s' % params,
 
               '%(backup_file)s' % params,
 
               '%(backup_server)s' % params]
 

	
 
        subprocess.call(cmd)
 
        logging.info('Transferred file %s to %s', self.backup_file_name, cmd[4])
 

	
 
    def rm_file(self):
 
        logging.info('Removing file %s', self.backup_file_name)
 
        os.remove(os.path.join(self.backup_file_path, self.backup_file_name))
 

	
 

	
 
if __name__ == "__main__":
 

	
 
    repo_location = '/home/repo_path'
 
    backup_server = 'root@192.168.1.100:/backups/mercurial'
 
    rsa_key = '/home/id_rsa'
 

	
 
    B_MANAGER = BackupManager(repo_location, rsa_key, backup_server)
 
    B_MANAGER.backup_repos()
 
    B_MANAGER.transfer_files()
 
    B_MANAGER.rm_file()
kallithea/config/app_cfg.py
 
@@ -74,48 +74,49 @@ class KallitheaAppConfig(AppConfig):
 
        self['default_renderer'] = 'mako'
 
        self['use_dotted_templatenames'] = False
 

	
 
        # Configure Sessions, store data as JSON to avoid pickle security issues
 
        self['session.enabled'] = True
 
        self['session.data_serializer'] = 'json'
 

	
 
        # Configure the base SQLALchemy Setup
 
        self['use_sqlalchemy'] = True
 
        self['model'] = kallithea.model.base
 
        self['DBSession'] = kallithea.model.meta.Session
 

	
 
        # Configure App without an authentication backend.
 
        self['auth_backend'] = None
 

	
 
        # Use custom error page for these errors. By default, Turbogears2 does not add
 
        # 400 in this list.
 
        # Explicitly listing all is considered more robust than appending to defaults,
 
        # in light of possible future framework changes.
 
        self['errorpage.status_codes'] = [400, 401, 403, 404]
 

	
 
        # Disable transaction manager -- currently Kallithea takes care of transactions itself
 
        self['tm.enabled'] = False
 

	
 

	
 
base_config = KallitheaAppConfig()
 

	
 
# TODO still needed as long as we use pylonslib
 
sys.modules['pylons'] = tg
 

	
 
# DebugBar, a debug toolbar for TurboGears2.
 
# (https://github.com/TurboGears/tgext.debugbar)
 
# To enable it, install 'tgext.debugbar' and 'kajiki', and run Kallithea with
 
# 'debug = true' (not in production!)
 
# See the Kallithea documentation for more information.
 
try:
 
    from tgext.debugbar import enable_debugbar
 
    import kajiki # only to check its existence
 
except ImportError:
 
    pass
 
else:
 
    base_config['renderers'].append('kajiki')
 
    enable_debugbar(base_config)
 

	
 

	
 
def setup_configuration(app):
 
    config = app.config
 

	
 
    if config.get('ignore_alembic_revision', False):
 
@@ -153,41 +154,43 @@ def setup_configuration(app):
 
    repos_path = make_ui('db').configitems('paths')[0][1]
 
    config['base_path'] = repos_path
 
    set_app_settings(config)
 

	
 
    instance_id = kallithea.CONFIG.get('instance_id', '*')
 
    if instance_id == '*':
 
        instance_id = '%s-%s' % (platform.uname()[1], os.getpid())
 
        kallithea.CONFIG['instance_id'] = instance_id
 

	
 
    # update kallithea.CONFIG with the meanwhile changed 'config'
 
    kallithea.CONFIG.update(config)
 

	
 
    # configure vcs and indexer libraries (they are supposed to be independent
 
    # as much as possible and thus avoid importing tg.config or
 
    # kallithea.CONFIG).
 
    set_vcs_config(kallithea.CONFIG)
 
    set_indexer_config(kallithea.CONFIG)
 

	
 
    check_git_version()
 

	
 
    if str2bool(config.get('initial_repo_scan', True)):
 
        repo2db_mapper(ScmModel().repo_scan(repos_path),
 
                       remove_obsolete=False, install_git_hooks=False)
 

	
 

	
 
hooks.register('configure_new_app', setup_configuration)
 

	
 

	
 
def setup_application(app):
 
    config = app.config
 

	
 
    # we want our low level middleware to get to the request ASAP. We don't
 
    # need any stack middleware in them - especially no StatusCodeRedirect buffering
 
    app = SimpleHg(app, config)
 
    app = SimpleGit(app, config)
 

	
 
    # Enable https redirects based on HTTP_X_URL_SCHEME set by proxy
 
    if any(asbool(config.get(x)) for x in ['https_fixup', 'force_https', 'use_htsts']):
 
        app = HttpsFixup(app, config)
 
    return app
 

	
 

	
 
hooks.register('before_config', setup_application)
kallithea/config/middleware.py
 
@@ -2,48 +2,50 @@
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""WSGI middleware initialization for the Kallithea application."""
 

	
 
import logging.config
 
from kallithea.config.app_cfg import base_config
 
from kallithea.config.environment import load_environment
 

	
 
__all__ = ['make_app']
 

	
 
# Use base_config to setup the necessary PasteDeploy application factory.
 
# make_base_app will wrap the TurboGears2 app with all the middleware it needs.
 
make_base_app = base_config.setup_tg_wsgi_app(load_environment)
 

	
 

	
 
def make_app_without_logging(global_conf, full_stack=True, **app_conf):
 
    """The core of make_app for use from gearbox commands (other than 'serve')"""
 
    return make_base_app(global_conf, full_stack=full_stack, **app_conf)
 

	
 

	
 
def make_app(global_conf, full_stack=True, **app_conf):
 
    """
 
    Set up Kallithea with the settings found in the PasteDeploy configuration
 
    file used.
 

	
 
    :param global_conf: The global settings for Kallithea (those
 
        defined under the ``[DEFAULT]`` section).
 
    :type global_conf: dict
 
    :param full_stack: Should the whole TurboGears2 stack be set up?
 
    :type full_stack: str or bool
 
    :return: The Kallithea application with all the relevant middleware
 
        loaded.
 

	
 
    This is the PasteDeploy factory for the Kallithea application.
 

	
 
    ``app_conf`` contains all the application-specific settings (those defined
 
    under ``[app:main]``.
 
    """
 
    logging.config.fileConfig(global_conf['__file__'])
 
    return make_app_without_logging(global_conf, full_stack=full_stack, **app_conf)
kallithea/config/post_receive_tmpl.py
 
@@ -4,26 +4,27 @@ import sys
 
# set output mode on windows to binary for stderr
 
# this prevents python (or the windows console) from replacing \n with  \r\n
 
# git doesn't display remote output lines that contain \r
 
# and therefore without this modification git would displayes empty lines
 
# instead of the exception output
 
if sys.platform == "win32":
 
    import msvcrt
 
    msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
 

	
 
KALLITHEA_HOOK_VER = '_TMPL_'
 
os.environ['KALLITHEA_HOOK_VER'] = KALLITHEA_HOOK_VER
 
from kallithea.lib.hooks import handle_git_post_receive as _handler
 

	
 

	
 
def main():
 
    repo_path = os.path.abspath('.')
 
    push_data = sys.stdin.readlines()
 
    # os.environ is modified here by a subprocess call that
 
    # runs git and later git executes this hook.
 
    # Environ gets some additional info from kallithea system
 
    # like IP or username from basic-auth
 
    _handler(repo_path, push_data, os.environ)
 
    sys.exit(0)
 

	
 

	
 
if __name__ == '__main__':
 
    main()
kallithea/config/pre_receive_tmpl.py
 
@@ -4,26 +4,27 @@ import sys
 
# set output mode on windows to binary for stderr
 
# this prevents python (or the windows console) from replacing \n with  \r\n
 
# git doesn't display remote output lines that contain \r
 
# and therefore without this modification git would displayes empty lines
 
# instead of the exception output
 
if sys.platform == "win32":
 
    import msvcrt
 
    msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
 

	
 
KALLITHEA_HOOK_VER = '_TMPL_'
 
os.environ['KALLITHEA_HOOK_VER'] = KALLITHEA_HOOK_VER
 
from kallithea.lib.hooks import handle_git_pre_receive as _handler
 

	
 

	
 
def main():
 
    repo_path = os.path.abspath('.')
 
    push_data = sys.stdin.readlines()
 
    # os.environ is modified here by a subprocess call that
 
    # runs git and later git executes this hook.
 
    # Environ gets some additional info from kallithea system
 
    # like IP or username from basic-auth
 
    _handler(repo_path, push_data, os.environ)
 
    sys.exit(0)
 

	
 

	
 
if __name__ == '__main__':
 
    main()
kallithea/config/rcextensions/__init__.py
 
@@ -28,188 +28,202 @@ EXTRA_INDEX_EXTENSIONS = []
 

	
 
#==============================================================================
 
# POST CREATE REPOSITORY HOOK
 
#==============================================================================
 
# this function will be executed after each repository is created
 
def _crrepohook(*args, **kwargs):
 
    """
 
    Post create repository HOOK
 
    kwargs available:
 
     :param repo_name:
 
     :param repo_type:
 
     :param description:
 
     :param private:
 
     :param created_on:
 
     :param enable_downloads:
 
     :param repo_id:
 
     :param owner_id:
 
     :param enable_statistics:
 
     :param clone_uri:
 
     :param fork_id:
 
     :param group_id:
 
     :param created_by:
 
    """
 
    return 0
 

	
 

	
 
CREATE_REPO_HOOK = _crrepohook
 

	
 

	
 
#==============================================================================
 
# PRE CREATE USER HOOK
 
#==============================================================================
 
# this function will be executed before each user is created
 
def _pre_cruserhook(*args, **kwargs):
 
    """
 
    Pre create user HOOK, it returns a tuple of bool, reason.
 
    If bool is False the user creation will be stopped and reason
 
    will be displayed to the user.
 
    kwargs available:
 
    :param username:
 
    :param password:
 
    :param email:
 
    :param firstname:
 
    :param lastname:
 
    :param active:
 
    :param admin:
 
    :param created_by:
 
    """
 
    reason = 'allowed'
 
    return True, reason
 

	
 

	
 
PRE_CREATE_USER_HOOK = _pre_cruserhook
 

	
 
#==============================================================================
 
# POST CREATE USER HOOK
 
#==============================================================================
 
# this function will be executed after each user is created
 
def _cruserhook(*args, **kwargs):
 
    """
 
    Post create user HOOK
 
    kwargs available:
 
      :param username:
 
      :param full_name_or_username:
 
      :param full_contact:
 
      :param user_id:
 
      :param name:
 
      :param firstname:
 
      :param short_contact:
 
      :param admin:
 
      :param lastname:
 
      :param ip_addresses:
 
      :param ldap_dn:
 
      :param email:
 
      :param api_key:
 
      :param last_login:
 
      :param full_name:
 
      :param active:
 
      :param password:
 
      :param emails:
 
      :param inherit_default_permissions:
 
      :param created_by:
 
    """
 
    return 0
 

	
 

	
 
CREATE_USER_HOOK = _cruserhook
 

	
 

	
 
#==============================================================================
 
# POST DELETE REPOSITORY HOOK
 
#==============================================================================
 
# this function will be executed after each repository deletion
 
def _dlrepohook(*args, **kwargs):
 
    """
 
    Post delete repository HOOK
 
    kwargs available:
 
     :param repo_name:
 
     :param repo_type:
 
     :param description:
 
     :param private:
 
     :param created_on:
 
     :param enable_downloads:
 
     :param repo_id:
 
     :param owner_id:
 
     :param enable_statistics:
 
     :param clone_uri:
 
     :param fork_id:
 
     :param group_id:
 
     :param deleted_by:
 
     :param deleted_on:
 
    """
 
    return 0
 

	
 

	
 
DELETE_REPO_HOOK = _dlrepohook
 

	
 

	
 
#==============================================================================
 
# POST DELETE USER HOOK
 
#==============================================================================
 
# this function will be executed after each user is deleted
 
def _dluserhook(*args, **kwargs):
 
    """
 
    Post delete user HOOK
 
    kwargs available:
 
      :param username:
 
      :param full_name_or_username:
 
      :param full_contact:
 
      :param user_id:
 
      :param name:
 
      :param firstname:
 
      :param short_contact:
 
      :param admin:
 
      :param lastname:
 
      :param ip_addresses:
 
      :param ldap_dn:
 
      :param email:
 
      :param api_key:
 
      :param last_login:
 
      :param full_name:
 
      :param active:
 
      :param password:
 
      :param emails:
 
      :param inherit_default_permissions:
 
      :param deleted_by:
 
    """
 
    return 0
 

	
 

	
 
DELETE_USER_HOOK = _dluserhook
 

	
 

	
 
#==============================================================================
 
# POST PUSH HOOK
 
#==============================================================================
 

	
 
# this function will be executed after each push it's executed after the
 
# build-in hook that Kallithea uses for logging pushes
 
def _pushhook(*args, **kwargs):
 
    """
 
    Post push hook
 
    kwargs available:
 

	
 
      :param server_url: url of instance that triggered this hook
 
      :param config: path to .ini config used
 
      :param scm: type of VS 'git' or 'hg'
 
      :param username: name of user who pushed
 
      :param ip: ip of who pushed
 
      :param action: push
 
      :param repository: repository name
 
      :param pushed_revs: list of pushed revisions
 
    """
 
    return 0
 

	
 

	
 
PUSH_HOOK = _pushhook
 

	
 

	
 
#==============================================================================
 
# POST PULL HOOK
 
#==============================================================================
 

	
 
# this function will be executed after each push it's executed after the
 
# build-in hook that Kallithea uses for logging pulls
 
def _pullhook(*args, **kwargs):
 
    """
 
    Post pull hook
 
    kwargs available::
 

	
 
      :param server_url: url of instance that triggered this hook
 
      :param config: path to .ini config used
 
      :param scm: type of VS 'git' or 'hg'
 
      :param username: name of user who pulled
 
      :param ip: ip of who pulled
 
      :param action: pull
 
      :param repository: repository name
 
    """
 
    return 0
 

	
 

	
 
PULL_HOOK = _pullhook
kallithea/config/routing.py
 
@@ -73,435 +73,428 @@ def make_map(config):
 

	
 
        :param environ:
 
        :param match_dict:
 
        """
 
        repo_group_name = match_dict.get('group_name')
 
        return is_valid_repo_group(repo_group_name, config['base_path'],
 
                                   skip_path_check=True)
 

	
 
    def check_user_group(environ, match_dict):
 
        """
 
        check for valid user group for proper 404 handling
 

	
 
        :param environ:
 
        :param match_dict:
 
        """
 
        return True
 

	
 
    def check_int(environ, match_dict):
 
        return match_dict.get('id').isdigit()
 

	
 
    #==========================================================================
 
    # CUSTOM ROUTES HERE
 
    #==========================================================================
 

	
 
    #MAIN PAGE
 
    # MAIN PAGE
 
    rmap.connect('home', '/', controller='home', action='index')
 
    rmap.connect('about', '/about', controller='home', action='about')
 
    rmap.connect('repo_switcher_data', '/_repos', controller='home',
 
                 action='repo_switcher_data')
 

	
 
    rmap.connect('rst_help',
 
                 "http://docutils.sourceforge.net/docs/user/rst/quickref.html",
 
                 _static=True)
 
    rmap.connect('kallithea_project_url', "https://kallithea-scm.org/", _static=True)
 
    rmap.connect('issues_url', 'https://bitbucket.org/conservancy/kallithea/issues', _static=True)
 

	
 
    #ADMIN REPOSITORY ROUTES
 
    # ADMIN REPOSITORY ROUTES
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/repos') as m:
 
        m.connect("repos", "/repos",
 
                  action="create", conditions=dict(method=["POST"]))
 
        m.connect("repos", "/repos",
 
                  action="index", conditions=dict(method=["GET"]))
 
        m.connect("new_repo", "/create_repository",
 
                  action="create_repository", conditions=dict(method=["GET"]))
 
        m.connect("update_repo", "/repos/{repo_name:.*?}",
 
                  action="update", conditions=dict(method=["POST"],
 
                  function=check_repo))
 
        m.connect("delete_repo", "/repos/{repo_name:.*?}/delete",
 
                  action="delete", conditions=dict(method=["POST"]))
 

	
 
    #ADMIN REPOSITORY GROUPS ROUTES
 
    # ADMIN REPOSITORY GROUPS ROUTES
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/repo_groups') as m:
 
        m.connect("repos_groups", "/repo_groups",
 
                  action="create", conditions=dict(method=["POST"]))
 
        m.connect("repos_groups", "/repo_groups",
 
                  action="index", conditions=dict(method=["GET"]))
 
        m.connect("new_repos_group", "/repo_groups/new",
 
                  action="new", conditions=dict(method=["GET"]))
 
        m.connect("update_repos_group", "/repo_groups/{group_name:.*?}",
 
                  action="update", conditions=dict(method=["POST"],
 
                                                   function=check_group))
 

	
 
        m.connect("repos_group", "/repo_groups/{group_name:.*?}",
 
                  action="show", conditions=dict(method=["GET"],
 
                                                 function=check_group))
 

	
 
        #EXTRAS REPO GROUP ROUTES
 
        # EXTRAS REPO GROUP ROUTES
 
        m.connect("edit_repo_group", "/repo_groups/{group_name:.*?}/edit",
 
                  action="edit",
 
                  conditions=dict(method=["GET"], function=check_group))
 

	
 
        m.connect("edit_repo_group_advanced", "/repo_groups/{group_name:.*?}/edit/advanced",
 
                  action="edit_repo_group_advanced",
 
                  conditions=dict(method=["GET"], function=check_group))
 

	
 
        m.connect("edit_repo_group_perms", "/repo_groups/{group_name:.*?}/edit/permissions",
 
                  action="edit_repo_group_perms",
 
                  conditions=dict(method=["GET"], function=check_group))
 
        m.connect("edit_repo_group_perms_update", "/repo_groups/{group_name:.*?}/edit/permissions",
 
                  action="update_perms",
 
                  conditions=dict(method=["POST"], function=check_group))
 
        m.connect("edit_repo_group_perms_delete", "/repo_groups/{group_name:.*?}/edit/permissions/delete",
 
                  action="delete_perms",
 
                  conditions=dict(method=["POST"], function=check_group))
 

	
 
        m.connect("delete_repo_group", "/repo_groups/{group_name:.*?}/delete",
 
                  action="delete", conditions=dict(method=["POST"],
 
                                                   function=check_group_skip_path))
 

	
 

	
 
    #ADMIN USER ROUTES
 
    # ADMIN USER ROUTES
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/users') as m:
 
        m.connect("new_user", "/users/new",
 
                  action="create", conditions=dict(method=["POST"]))
 
        m.connect("users", "/users",
 
                  action="index", conditions=dict(method=["GET"]))
 
        m.connect("formatted_users", "/users.{format}",
 
                  action="index", conditions=dict(method=["GET"]))
 
        m.connect("new_user", "/users/new",
 
                  action="new", conditions=dict(method=["GET"]))
 
        m.connect("update_user", "/users/{id}",
 
                  action="update", conditions=dict(method=["POST"]))
 
        m.connect("delete_user", "/users/{id}/delete",
 
                  action="delete", conditions=dict(method=["POST"]))
 
        m.connect("edit_user", "/users/{id}/edit",
 
                  action="edit", conditions=dict(method=["GET"]))
 

	
 
        #EXTRAS USER ROUTES
 
        # EXTRAS USER ROUTES
 
        m.connect("edit_user_advanced", "/users/{id}/edit/advanced",
 
                  action="edit_advanced", conditions=dict(method=["GET"]))
 

	
 
        m.connect("edit_user_api_keys", "/users/{id}/edit/api_keys",
 
                  action="edit_api_keys", conditions=dict(method=["GET"]))
 
        m.connect("edit_user_api_keys_update", "/users/{id}/edit/api_keys",
 
                  action="add_api_key", conditions=dict(method=["POST"]))
 
        m.connect("edit_user_api_keys_delete", "/users/{id}/edit/api_keys/delete",
 
                  action="delete_api_key", conditions=dict(method=["POST"]))
 

	
 
        m.connect("edit_user_perms", "/users/{id}/edit/permissions",
 
                  action="edit_perms", conditions=dict(method=["GET"]))
 
        m.connect("edit_user_perms_update", "/users/{id}/edit/permissions",
 
                  action="update_perms", conditions=dict(method=["POST"]))
 

	
 
        m.connect("edit_user_emails", "/users/{id}/edit/emails",
 
                  action="edit_emails", conditions=dict(method=["GET"]))
 
        m.connect("edit_user_emails_update", "/users/{id}/edit/emails",
 
                  action="add_email", conditions=dict(method=["POST"]))
 
        m.connect("edit_user_emails_delete", "/users/{id}/edit/emails/delete",
 
                  action="delete_email", conditions=dict(method=["POST"]))
 

	
 
        m.connect("edit_user_ips", "/users/{id}/edit/ips",
 
                  action="edit_ips", conditions=dict(method=["GET"]))
 
        m.connect("edit_user_ips_update", "/users/{id}/edit/ips",
 
                  action="add_ip", conditions=dict(method=["POST"]))
 
        m.connect("edit_user_ips_delete", "/users/{id}/edit/ips/delete",
 
                  action="delete_ip", conditions=dict(method=["POST"]))
 

	
 
    #ADMIN USER GROUPS REST ROUTES
 
    # ADMIN USER GROUPS REST ROUTES
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/user_groups') as m:
 
        m.connect("users_groups", "/user_groups",
 
                  action="create", conditions=dict(method=["POST"]))
 
        m.connect("users_groups", "/user_groups",
 
                  action="index", conditions=dict(method=["GET"]))
 
        m.connect("new_users_group", "/user_groups/new",
 
                  action="new", conditions=dict(method=["GET"]))
 
        m.connect("update_users_group", "/user_groups/{id}",
 
                  action="update", conditions=dict(method=["POST"]))
 
        m.connect("delete_users_group", "/user_groups/{id}/delete",
 
                  action="delete", conditions=dict(method=["POST"]))
 
        m.connect("edit_users_group", "/user_groups/{id}/edit",
 
                  action="edit", conditions=dict(method=["GET"]),
 
                  function=check_user_group)
 

	
 
        #EXTRAS USER GROUP ROUTES
 
        # EXTRAS USER GROUP ROUTES
 
        m.connect("edit_user_group_default_perms", "/user_groups/{id}/edit/default_perms",
 
                  action="edit_default_perms", conditions=dict(method=["GET"]))
 
        m.connect("edit_user_group_default_perms_update", "/user_groups/{id}/edit/default_perms",
 
                  action="update_default_perms", conditions=dict(method=["POST"]))
 

	
 

	
 
        m.connect("edit_user_group_perms", "/user_groups/{id}/edit/perms",
 
                  action="edit_perms", conditions=dict(method=["GET"]))
 
        m.connect("edit_user_group_perms_update", "/user_groups/{id}/edit/perms",
 
                  action="update_perms", conditions=dict(method=["POST"]))
 
        m.connect("edit_user_group_perms_delete", "/user_groups/{id}/edit/perms/delete",
 
                  action="delete_perms", conditions=dict(method=["POST"]))
 

	
 
        m.connect("edit_user_group_advanced", "/user_groups/{id}/edit/advanced",
 
                  action="edit_advanced", conditions=dict(method=["GET"]))
 

	
 
        m.connect("edit_user_group_members", "/user_groups/{id}/edit/members",
 
                  action="edit_members", conditions=dict(method=["GET"]))
 

	
 

	
 

	
 
    #ADMIN PERMISSIONS ROUTES
 
    # ADMIN PERMISSIONS ROUTES
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/permissions') as m:
 
        m.connect("admin_permissions", "/permissions",
 
                  action="permission_globals", conditions=dict(method=["POST"]))
 
        m.connect("admin_permissions", "/permissions",
 
                  action="permission_globals", conditions=dict(method=["GET"]))
 

	
 
        m.connect("admin_permissions_ips", "/permissions/ips",
 
                  action="permission_ips", conditions=dict(method=["GET"]))
 

	
 
        m.connect("admin_permissions_perms", "/permissions/perms",
 
                  action="permission_perms", conditions=dict(method=["GET"]))
 

	
 

	
 
    #ADMIN DEFAULTS ROUTES
 
    # ADMIN DEFAULTS ROUTES
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/defaults') as m:
 
        m.connect('defaults', 'defaults',
 
                  action="index")
 
        m.connect('defaults_update', 'defaults/{id}/update',
 
                  action="update", conditions=dict(method=["POST"]))
 

	
 
    #ADMIN AUTH SETTINGS
 
    # ADMIN AUTH SETTINGS
 
    rmap.connect('auth_settings', '%s/auth' % ADMIN_PREFIX,
 
                 controller='admin/auth_settings', action='auth_settings',
 
                 conditions=dict(method=["POST"]))
 
    rmap.connect('auth_home', '%s/auth' % ADMIN_PREFIX,
 
                 controller='admin/auth_settings')
 

	
 
    #ADMIN SETTINGS ROUTES
 
    # ADMIN SETTINGS ROUTES
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/settings') as m:
 
        m.connect("admin_settings", "/settings",
 
                  action="settings_vcs", conditions=dict(method=["POST"]))
 
        m.connect("admin_settings", "/settings",
 
                  action="settings_vcs", conditions=dict(method=["GET"]))
 

	
 
        m.connect("admin_settings_mapping", "/settings/mapping",
 
                  action="settings_mapping", conditions=dict(method=["POST"]))
 
        m.connect("admin_settings_mapping", "/settings/mapping",
 
                  action="settings_mapping", conditions=dict(method=["GET"]))
 

	
 
        m.connect("admin_settings_global", "/settings/global",
 
                  action="settings_global", conditions=dict(method=["POST"]))
 
        m.connect("admin_settings_global", "/settings/global",
 
                  action="settings_global", conditions=dict(method=["GET"]))
 

	
 
        m.connect("admin_settings_visual", "/settings/visual",
 
                  action="settings_visual", conditions=dict(method=["POST"]))
 
        m.connect("admin_settings_visual", "/settings/visual",
 
                  action="settings_visual", conditions=dict(method=["GET"]))
 

	
 
        m.connect("admin_settings_email", "/settings/email",
 
                  action="settings_email", conditions=dict(method=["POST"]))
 
        m.connect("admin_settings_email", "/settings/email",
 
                  action="settings_email", conditions=dict(method=["GET"]))
 

	
 
        m.connect("admin_settings_hooks", "/settings/hooks",
 
                  action="settings_hooks", conditions=dict(method=["POST"]))
 
        m.connect("admin_settings_hooks_delete", "/settings/hooks/delete",
 
                  action="settings_hooks", conditions=dict(method=["POST"]))
 
        m.connect("admin_settings_hooks", "/settings/hooks",
 
                  action="settings_hooks", conditions=dict(method=["GET"]))
 

	
 
        m.connect("admin_settings_search", "/settings/search",
 
                  action="settings_search", conditions=dict(method=["POST"]))
 
        m.connect("admin_settings_search", "/settings/search",
 
                  action="settings_search", conditions=dict(method=["GET"]))
 

	
 
        m.connect("admin_settings_system", "/settings/system",
 
                  action="settings_system", conditions=dict(method=["POST"]))
 
        m.connect("admin_settings_system", "/settings/system",
 
                  action="settings_system", conditions=dict(method=["GET"]))
 
        m.connect("admin_settings_system_update", "/settings/system/updates",
 
                  action="settings_system_update", conditions=dict(method=["GET"]))
 

	
 
    #ADMIN MY ACCOUNT
 
    # ADMIN MY ACCOUNT
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/my_account') as m:
 

	
 
        m.connect("my_account", "/my_account",
 
                  action="my_account", conditions=dict(method=["GET"]))
 
        m.connect("my_account", "/my_account",
 
                  action="my_account", conditions=dict(method=["POST"]))
 

	
 
        m.connect("my_account_password", "/my_account/password",
 
                  action="my_account_password", conditions=dict(method=["GET"]))
 
        m.connect("my_account_password", "/my_account/password",
 
                  action="my_account_password", conditions=dict(method=["POST"]))
 

	
 
        m.connect("my_account_repos", "/my_account/repos",
 
                  action="my_account_repos", conditions=dict(method=["GET"]))
 

	
 
        m.connect("my_account_watched", "/my_account/watched",
 
                  action="my_account_watched", conditions=dict(method=["GET"]))
 

	
 
        m.connect("my_account_perms", "/my_account/perms",
 
                  action="my_account_perms", conditions=dict(method=["GET"]))
 

	
 
        m.connect("my_account_emails", "/my_account/emails",
 
                  action="my_account_emails", conditions=dict(method=["GET"]))
 
        m.connect("my_account_emails", "/my_account/emails",
 
                  action="my_account_emails_add", conditions=dict(method=["POST"]))
 
        m.connect("my_account_emails_delete", "/my_account/emails/delete",
 
                  action="my_account_emails_delete", conditions=dict(method=["POST"]))
 

	
 
        m.connect("my_account_api_keys", "/my_account/api_keys",
 
                  action="my_account_api_keys", conditions=dict(method=["GET"]))
 
        m.connect("my_account_api_keys", "/my_account/api_keys",
 
                  action="my_account_api_keys_add", conditions=dict(method=["POST"]))
 
        m.connect("my_account_api_keys_delete", "/my_account/api_keys/delete",
 
                  action="my_account_api_keys_delete", conditions=dict(method=["POST"]))
 

	
 
    #NOTIFICATION REST ROUTES
 
    # NOTIFICATION REST ROUTES
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/notifications') as m:
 
        m.connect("notifications", "/notifications",
 
                  action="index", conditions=dict(method=["GET"]))
 
        m.connect("notifications_mark_all_read", "/notifications/mark_all_read",
 
                  action="mark_all_read", conditions=dict(method=["GET"]))
 
        m.connect("formatted_notifications", "/notifications.{format}",
 
                  action="index", conditions=dict(method=["GET"]))
 
        m.connect("notification_update", "/notifications/{notification_id}/update",
 
                  action="update", conditions=dict(method=["POST"]))
 
        m.connect("notification_delete", "/notifications/{notification_id}/delete",
 
                  action="delete", conditions=dict(method=["POST"]))
 
        m.connect("notification", "/notifications/{notification_id}",
 
                  action="show", conditions=dict(method=["GET"]))
 
        m.connect("formatted_notification", "/notifications/{notification_id}.{format}",
 
                  action="show", conditions=dict(method=["GET"]))
 

	
 
    #ADMIN GIST
 
    # ADMIN GIST
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/gists') as m:
 
        m.connect("gists", "/gists",
 
                  action="create", conditions=dict(method=["POST"]))
 
        m.connect("gists", "/gists",
 
                  action="index", conditions=dict(method=["GET"]))
 
        m.connect("new_gist", "/gists/new",
 
                  action="new", conditions=dict(method=["GET"]))
 

	
 

	
 
        m.connect("gist_delete", "/gists/{gist_id}/delete",
 
                  action="delete", conditions=dict(method=["POST"]))
 
        m.connect("edit_gist", "/gists/{gist_id}/edit",
 
                  action="edit", conditions=dict(method=["GET", "POST"]))
 
        m.connect("edit_gist_check_revision", "/gists/{gist_id}/edit/check_revision",
 
                  action="check_revision", conditions=dict(method=["POST"]))
 

	
 

	
 
        m.connect("gist", "/gists/{gist_id}",
 
                  action="show", conditions=dict(method=["GET"]))
 
        m.connect("gist_rev", "/gists/{gist_id}/{revision}",
 
                  revision="tip",
 
                  action="show", conditions=dict(method=["GET"]))
 
        m.connect("formatted_gist", "/gists/{gist_id}/{revision}/{format}",
 
                  revision="tip",
 
                  action="show", conditions=dict(method=["GET"]))
 
        m.connect("formatted_gist_file", "/gists/{gist_id}/{revision}/{format}/{f_path:.*}",
 
                  revision='tip',
 
                  action="show", conditions=dict(method=["GET"]))
 

	
 
    #ADMIN MAIN PAGES
 
    # ADMIN MAIN PAGES
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
 
                        controller='admin/admin') as m:
 
        m.connect('admin_home', '', action='index')
 
        m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}',
 
                  action='add_repo')
 
    #==========================================================================
 
    # API V2
 
    #==========================================================================
 
    with rmap.submapper(path_prefix=ADMIN_PREFIX, controller='api/api',
 
                        action='_dispatch') as m:
 
        m.connect('api', '/api')
 

	
 
    #USER JOURNAL
 
    # USER JOURNAL
 
    rmap.connect('journal', '%s/journal' % ADMIN_PREFIX,
 
                 controller='journal', action='index')
 
    rmap.connect('journal_rss', '%s/journal/rss' % ADMIN_PREFIX,
 
                 controller='journal', action='journal_rss')
 
    rmap.connect('journal_atom', '%s/journal/atom' % ADMIN_PREFIX,
 
                 controller='journal', action='journal_atom')
 

	
 
    rmap.connect('public_journal', '%s/public_journal' % ADMIN_PREFIX,
 
                 controller='journal', action="public_journal")
 

	
 
    rmap.connect('public_journal_rss', '%s/public_journal/rss' % ADMIN_PREFIX,
 
                 controller='journal', action="public_journal_rss")
 

	
 
    rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % ADMIN_PREFIX,
 
                 controller='journal', action="public_journal_rss")
 

	
 
    rmap.connect('public_journal_atom',
 
                 '%s/public_journal/atom' % ADMIN_PREFIX, controller='journal',
 
                 action="public_journal_atom")
 

	
 
    rmap.connect('public_journal_atom_old',
 
                 '%s/public_journal_atom' % ADMIN_PREFIX, controller='journal',
 
                 action="public_journal_atom")
 

	
 
    rmap.connect('toggle_following', '%s/toggle_following' % ADMIN_PREFIX,
 
                 controller='journal', action='toggle_following',
 
                 conditions=dict(method=["POST"]))
 

	
 
    #SEARCH
 
    # SEARCH
 
    rmap.connect('search', '%s/search' % ADMIN_PREFIX, controller='search',)
 
    rmap.connect('search_repo_admin', '%s/search/{repo_name:.*}' % ADMIN_PREFIX,
 
                 controller='search',
 
                 conditions=dict(function=check_repo))
 
    rmap.connect('search_repo', '/{repo_name:.*?}/search',
 
                 controller='search',
 
                 conditions=dict(function=check_repo),
 
                 )
 

	
 
    #LOGIN/LOGOUT/REGISTER/SIGN IN
 
    # LOGIN/LOGOUT/REGISTER/SIGN IN
 
    rmap.connect('authentication_token', '%s/authentication_token' % ADMIN_PREFIX, controller='login', action='authentication_token')
 
    rmap.connect('login_home', '%s/login' % ADMIN_PREFIX, controller='login')
 
    rmap.connect('logout_home', '%s/logout' % ADMIN_PREFIX, controller='login',
 
                 action='logout')
 

	
 
    rmap.connect('register', '%s/register' % ADMIN_PREFIX, controller='login',
 
                 action='register')
 

	
 
    rmap.connect('reset_password', '%s/password_reset' % ADMIN_PREFIX,
 
                 controller='login', action='password_reset')
 

	
 
    rmap.connect('reset_password_confirmation',
 
                 '%s/password_reset_confirmation' % ADMIN_PREFIX,
 
                 controller='login', action='password_reset_confirmation')
 

	
 
    #FEEDS
 
    # FEEDS
 
    rmap.connect('rss_feed_home', '/{repo_name:.*?}/feed/rss',
 
                controller='feed', action='rss',
 
                conditions=dict(function=check_repo))
 

	
 
    rmap.connect('atom_feed_home', '/{repo_name:.*?}/feed/atom',
 
                controller='feed', action='atom',
 
                conditions=dict(function=check_repo))
 

	
 
    #==========================================================================
 
    # REPOSITORY ROUTES
 
    #==========================================================================
 
    rmap.connect('repo_creating_home', '/{repo_name:.*?}/repo_creating',
 
                controller='admin/repos', action='repo_creating')
 
    rmap.connect('repo_check_home', '/{repo_name:.*?}/crepo_check',
 
                controller='admin/repos', action='repo_check')
 

	
 
    rmap.connect('summary_home', '/{repo_name:.*?}',
 
                controller='summary',
 
                conditions=dict(function=check_repo))
 

	
 
    # must be here for proper group/repo catching
 
    rmap.connect('repos_group_home', '/{group_name:.*}',
 
                controller='admin/repo_groups', action="show_by_name",
 
                conditions=dict(function=check_group))
 
@@ -530,92 +523,89 @@ def make_map(config):
 
    rmap.connect("edit_repo", "/{repo_name:.*?}/settings",
 
                 controller='admin/repos', action="edit",
 
                 conditions=dict(method=["GET"], function=check_repo))
 

	
 
    rmap.connect("edit_repo_perms", "/{repo_name:.*?}/settings/permissions",
 
                 controller='admin/repos', action="edit_permissions",
 
                 conditions=dict(method=["GET"], function=check_repo))
 
    rmap.connect("edit_repo_perms_update", "/{repo_name:.*?}/settings/permissions",
 
                 controller='admin/repos', action="edit_permissions_update",
 
                 conditions=dict(method=["POST"], function=check_repo))
 
    rmap.connect("edit_repo_perms_revoke", "/{repo_name:.*?}/settings/permissions/delete",
 
                 controller='admin/repos', action="edit_permissions_revoke",
 
                 conditions=dict(method=["POST"], function=check_repo))
 

	
 
    rmap.connect("edit_repo_fields", "/{repo_name:.*?}/settings/fields",
 
                 controller='admin/repos', action="edit_fields",
 
                 conditions=dict(method=["GET"], function=check_repo))
 
    rmap.connect('create_repo_fields', "/{repo_name:.*?}/settings/fields/new",
 
                 controller='admin/repos', action="create_repo_field",
 
                 conditions=dict(method=["POST"], function=check_repo))
 
    rmap.connect('delete_repo_fields', "/{repo_name:.*?}/settings/fields/{field_id}/delete",
 
                 controller='admin/repos', action="delete_repo_field",
 
                 conditions=dict(method=["POST"], function=check_repo))
 

	
 

	
 
    rmap.connect("edit_repo_advanced", "/{repo_name:.*?}/settings/advanced",
 
                 controller='admin/repos', action="edit_advanced",
 
                 conditions=dict(method=["GET"], function=check_repo))
 

	
 
    rmap.connect("edit_repo_advanced_locking", "/{repo_name:.*?}/settings/advanced/locking",
 
                 controller='admin/repos', action="edit_advanced_locking",
 
                 conditions=dict(method=["POST"], function=check_repo))
 
    rmap.connect('toggle_locking', "/{repo_name:.*?}/settings/advanced/locking_toggle",
 
                 controller='admin/repos', action="toggle_locking",
 
                 conditions=dict(method=["GET"], function=check_repo))
 

	
 
    rmap.connect("edit_repo_advanced_journal", "/{repo_name:.*?}/settings/advanced/journal",
 
                 controller='admin/repos', action="edit_advanced_journal",
 
                 conditions=dict(method=["POST"], function=check_repo))
 

	
 
    rmap.connect("edit_repo_advanced_fork", "/{repo_name:.*?}/settings/advanced/fork",
 
                 controller='admin/repos', action="edit_advanced_fork",
 
                 conditions=dict(method=["POST"], function=check_repo))
 

	
 

	
 
    rmap.connect("edit_repo_caches", "/{repo_name:.*?}/settings/caches",
 
                 controller='admin/repos', action="edit_caches",
 
                 conditions=dict(method=["GET"], function=check_repo))
 
    rmap.connect("update_repo_caches", "/{repo_name:.*?}/settings/caches",
 
                 controller='admin/repos', action="edit_caches",
 
                 conditions=dict(method=["POST"], function=check_repo))
 

	
 

	
 
    rmap.connect("edit_repo_remote", "/{repo_name:.*?}/settings/remote",
 
                 controller='admin/repos', action="edit_remote",
 
                 conditions=dict(method=["GET"], function=check_repo))
 
    rmap.connect("edit_repo_remote_update", "/{repo_name:.*?}/settings/remote",
 
                 controller='admin/repos', action="edit_remote",
 
                 conditions=dict(method=["POST"], function=check_repo))
 

	
 
    rmap.connect("edit_repo_statistics", "/{repo_name:.*?}/settings/statistics",
 
                 controller='admin/repos', action="edit_statistics",
 
                 conditions=dict(method=["GET"], function=check_repo))
 
    rmap.connect("edit_repo_statistics_update", "/{repo_name:.*?}/settings/statistics",
 
                 controller='admin/repos', action="edit_statistics",
 
                 conditions=dict(method=["POST"], function=check_repo))
 

	
 
    #still working url for backward compat.
 
    # still working url for backward compat.
 
    rmap.connect('raw_changeset_home_depraced',
 
                 '/{repo_name:.*?}/raw-changeset/{revision}',
 
                 controller='changeset', action='changeset_raw',
 
                 revision='tip', conditions=dict(function=check_repo))
 

	
 
    ## new URLs
 
    rmap.connect('changeset_raw_home',
 
                 '/{repo_name:.*?}/changeset-diff/{revision}',
 
                 controller='changeset', action='changeset_raw',
 
                 revision='tip', conditions=dict(function=check_repo))
 

	
 
    rmap.connect('changeset_patch_home',
 
                 '/{repo_name:.*?}/changeset-patch/{revision}',
 
                 controller='changeset', action='changeset_patch',
 
                 revision='tip', conditions=dict(function=check_repo))
 

	
 
    rmap.connect('changeset_download_home',
 
                 '/{repo_name:.*?}/changeset-download/{revision}',
 
                 controller='changeset', action='changeset_download',
 
                 revision='tip', conditions=dict(function=check_repo))
 

	
 
    rmap.connect('changeset_comment',
 
                 '/{repo_name:.*?}/changeset-comment/{revision}',
 
                controller='changeset', revision='tip', action='comment',
 
@@ -791,28 +781,30 @@ def make_map(config):
 
                 controller='followers', action='followers',
 
                 conditions=dict(function=check_repo))
 

	
 
    return rmap
 

	
 

	
 
class UrlGenerator(object):
 
    """Emulate pylons.url in providing a wrapper around routes.url
 

	
 
    This code was added during migration from Pylons to Turbogears2. Pylons
 
    already provided a wrapper like this, but Turbogears2 does not.
 

	
 
    When the routing of Kallithea is changed to use less Routes and more
 
    Turbogears2-style routing, this class may disappear or change.
 

	
 
    url() (the __call__ method) returns the URL based on a route name and
 
    arguments.
 
    url.current() returns the URL of the current page with arguments applied.
 

	
 
    Refer to documentation of Routes for details:
 
    https://routes.readthedocs.io/en/latest/generating.html#generation
 
    """
 
    def __call__(self, *args, **kwargs):
 
        return request.environ['routes.url'](*args, **kwargs)
 

	
 
    def current(self, *args, **kwargs):
 
        return request.environ['routes.url'].current(*args, **kwargs)
 

	
 

	
 
url = UrlGenerator()
kallithea/controllers/admin/admin.py
 
@@ -44,106 +44,106 @@ from kallithea.lib.indexers import JOURN
 
from kallithea.lib.page import Page
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def _journal_filter(user_log, search_term):
 
    """
 
    Filters sqlalchemy user_log based on search_term with whoosh Query language
 
    http://packages.python.org/Whoosh/querylang.html
 

	
 
    :param user_log:
 
    :param search_term:
 
    """
 
    log.debug('Initial search term: %r', search_term)
 
    qry = None
 
    if search_term:
 
        qp = QueryParser('repository', schema=JOURNAL_SCHEMA)
 
        qp.add_plugin(DateParserPlugin())
 
        qry = qp.parse(unicode(search_term))
 
        log.debug('Filtering using parsed query %r', qry)
 

	
 
    def wildcard_handler(col, wc_term):
 
        if wc_term.startswith('*') and not wc_term.endswith('*'):
 
            #postfix == endswith
 
            # postfix == endswith
 
            wc_term = remove_prefix(wc_term, prefix='*')
 
            return func.lower(col).endswith(func.lower(wc_term))
 
        elif wc_term.startswith('*') and wc_term.endswith('*'):
 
            #wildcard == ilike
 
            # wildcard == ilike
 
            wc_term = remove_prefix(wc_term, prefix='*')
 
            wc_term = remove_suffix(wc_term, suffix='*')
 
            return func.lower(col).contains(func.lower(wc_term))
 

	
 
    def get_filterion(field, val, term):
 

	
 
        if field == 'repository':
 
            field = getattr(UserLog, 'repository_name')
 
        elif field == 'ip':
 
            field = getattr(UserLog, 'user_ip')
 
        elif field == 'date':
 
            field = getattr(UserLog, 'action_date')
 
        elif field == 'username':
 
            field = getattr(UserLog, 'username')
 
        else:
 
            field = getattr(UserLog, field)
 
        log.debug('filter field: %s val=>%s', field, val)
 

	
 
        #sql filtering
 
        # sql filtering
 
        if isinstance(term, query.Wildcard):
 
            return wildcard_handler(field, val)
 
        elif isinstance(term, query.Prefix):
 
            return func.lower(field).startswith(func.lower(val))
 
        elif isinstance(term, query.DateRange):
 
            return and_(field >= val[0], field <= val[1])
 
        return func.lower(field) == func.lower(val)
 

	
 
    if isinstance(qry, (query.And, query.Term, query.Prefix, query.Wildcard,
 
                        query.DateRange)):
 
        if not isinstance(qry, query.And):
 
            qry = [qry]
 
        for term in qry:
 
            field = term.fieldname
 
            val = (term.text if not isinstance(term, query.DateRange)
 
                   else [term.startdate, term.enddate])
 
            user_log = user_log.filter(get_filterion(field, val, term))
 
    elif isinstance(qry, query.Or):
 
        filters = []
 
        for term in qry:
 
            field = term.fieldname
 
            val = (term.text if not isinstance(term, query.DateRange)
 
                   else [term.startdate, term.enddate])
 
            filters.append(get_filterion(field, val, term))
 
        user_log = user_log.filter(or_(*filters))
 

	
 
    return user_log
 

	
 

	
 
class AdminController(BaseController):
 

	
 
    @LoginRequired()
 
    def _before(self, *args, **kwargs):
 
        super(AdminController, self)._before(*args, **kwargs)
 

	
 
    @HasPermissionAnyDecorator('hg.admin')
 
    def index(self):
 
        users_log = UserLog.query() \
 
                .options(joinedload(UserLog.user)) \
 
                .options(joinedload(UserLog.repository))
 

	
 
        #FILTERING
 
        # FILTERING
 
        c.search_term = request.GET.get('filter')
 
        users_log = _journal_filter(users_log, c.search_term)
 

	
 
        users_log = users_log.order_by(UserLog.action_date.desc())
 

	
 
        p = safe_int(request.GET.get('page'), 1)
 

	
 
        def url_generator(**kw):
 
            return url.current(filter=c.search_term, **kw)
 

	
 
        c.users_log = Page(users_log, page=p, items_per_page=10, url=url_generator)
 

	
 
        if request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            return render('admin/admin_log.html')
 

	
 
        return render('admin/admin.html')
kallithea/controllers/admin/gists.py
 
@@ -85,49 +85,49 @@ class GistsController(BaseController):
 
                             .filter(Gist.owner_id == request.authuser.user_id)
 

	
 
        # MY public+private
 
        elif c.show_private and c.show_public:
 
            gists = gists.filter(or_(Gist.gist_type == Gist.GIST_PUBLIC,
 
                                     Gist.gist_type == Gist.GIST_PRIVATE)) \
 
                             .filter(Gist.owner_id == request.authuser.user_id)
 

	
 
        # default show ALL public gists
 
        if not c.show_public and not c.show_private:
 
            gists = gists.filter(Gist.gist_type == Gist.GIST_PUBLIC)
 

	
 
        c.gists = gists
 
        p = safe_int(request.GET.get('page'), 1)
 
        c.gists_pager = Page(c.gists, page=p, items_per_page=10)
 
        return render('admin/gists/index.html')
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    def create(self):
 
        self.__load_defaults()
 
        gist_form = GistForm([x[0] for x in c.lifetime_values])()
 
        try:
 
            form_result = gist_form.to_python(dict(request.POST))
 
            #TODO: multiple files support, from the form
 
            # TODO: multiple files support, from the form
 
            filename = form_result['filename'] or Gist.DEFAULT_FILENAME
 
            nodes = {
 
                filename: {
 
                    'content': form_result['content'],
 
                    'lexer': form_result['mimetype']  # None is autodetect
 
                }
 
            }
 
            _public = form_result['public']
 
            gist_type = Gist.GIST_PUBLIC if _public else Gist.GIST_PRIVATE
 
            gist = GistModel().create(
 
                description=form_result['description'],
 
                owner=request.authuser.user_id,
 
                gist_mapping=nodes,
 
                gist_type=gist_type,
 
                lifetime=form_result['lifetime']
 
            )
 
            Session().commit()
 
            new_gist_id = gist.gist_access_id
 
        except formencode.Invalid as errors:
 
            defaults = errors.value
 

	
 
            return formencode.htmlfill.render(
 
                render('admin/gists/new.html'),
 
                defaults=defaults,
 
@@ -229,32 +229,32 @@ class GistsController(BaseController):
 
                h.flash(_('Successfully updated gist content'), category='success')
 
            except NodeNotChangedError:
 
                # raised if nothing was changed in repo itself. We anyway then
 
                # store only DB stuff for gist
 
                Session().commit()
 
                h.flash(_('Successfully updated gist data'), category='success')
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                h.flash(_('Error occurred during update of gist %s') % gist_id,
 
                        category='error')
 

	
 
            raise HTTPFound(location=url('gist', gist_id=gist_id))
 

	
 
        return rendered
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @jsonify
 
    def check_revision(self, gist_id):
 
        c.gist = Gist.get_or_404(gist_id)
 
        last_rev = c.gist.scm_instance.get_changeset()
 
        success = True
 
        revision = request.POST.get('revision')
 

	
 
        ##TODO: maybe move this to model ?
 
        # TODO: maybe move this to model ?
 
        if revision != last_rev.raw_id:
 
            log.error('Last revision %s is different than submitted %s',
 
                      revision, last_rev)
 
            # our gist has newer version than we
 
            success = False
 

	
 
        return {'success': success}
kallithea/controllers/admin/my_account.py
 
@@ -110,99 +110,99 @@ class MyAccountController(BaseController
 
                post_data['password_confirmation'] = ''
 
                form_result = _form.to_python(post_data)
 
                # skip updating those attrs for my account
 
                skip_attrs = ['admin', 'active', 'extern_type', 'extern_name',
 
                              'new_password', 'password_confirmation',
 
                             ] + managed_fields
 

	
 
                UserModel().update(request.authuser.user_id, form_result,
 
                                   skip_attrs=skip_attrs)
 
                h.flash(_('Your account was updated successfully'),
 
                        category='success')
 
                Session().commit()
 
                update = True
 

	
 
            except formencode.Invalid as errors:
 
                return htmlfill.render(
 
                    render('admin/my_account/my_account.html'),
 
                    defaults=errors.value,
 
                    errors=errors.error_dict or {},
 
                    prefix_error=False,
 
                    encoding="UTF-8",
 
                    force_defaults=False)
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                h.flash(_('Error occurred during update of user %s') \
 
                h.flash(_('Error occurred during update of user %s')
 
                        % form_result.get('username'), category='error')
 
        if update:
 
            raise HTTPFound(location='my_account')
 
        return htmlfill.render(
 
            render('admin/my_account/my_account.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    def my_account_password(self):
 
        c.active = 'password'
 
        self.__load_data()
 

	
 
        managed_fields = auth_modules.get_managed_fields(c.user)
 
        c.can_change_password = 'password' not in managed_fields
 

	
 
        if request.POST and c.can_change_password:
 
            _form = PasswordChangeForm(request.authuser.username)()
 
            try:
 
                form_result = _form.to_python(request.POST)
 
                UserModel().update(request.authuser.user_id, form_result)
 
                Session().commit()
 
                h.flash(_("Successfully updated password"), category='success')
 
            except formencode.Invalid as errors:
 
                return htmlfill.render(
 
                    render('admin/my_account/my_account.html'),
 
                    defaults=errors.value,
 
                    errors=errors.error_dict or {},
 
                    prefix_error=False,
 
                    encoding="UTF-8",
 
                    force_defaults=False)
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                h.flash(_('Error occurred during update of user password'),
 
                        category='error')
 
        return render('admin/my_account/my_account.html')
 

	
 
    def my_account_repos(self):
 
        c.active = 'repos'
 
        self.__load_data()
 

	
 
        #data used to render the grid
 
        # data used to render the grid
 
        c.data = self._load_my_repos_data()
 
        return render('admin/my_account/my_account.html')
 

	
 
    def my_account_watched(self):
 
        c.active = 'watched'
 
        self.__load_data()
 

	
 
        #data used to render the grid
 
        # data used to render the grid
 
        c.data = self._load_my_repos_data(watched=True)
 
        return render('admin/my_account/my_account.html')
 

	
 
    def my_account_perms(self):
 
        c.active = 'perms'
 
        self.__load_data()
 
        c.perm_user = AuthUser(user_id=request.authuser.user_id)
 

	
 
        return render('admin/my_account/my_account.html')
 

	
 
    def my_account_emails(self):
 
        c.active = 'emails'
 
        self.__load_data()
 

	
 
        c.user_email_map = UserEmailMap.query() \
 
            .filter(UserEmailMap.user == c.user).all()
 
        return render('admin/my_account/my_account.html')
 

	
 
    def my_account_emails_add(self):
 
        email = request.POST.get('new_email')
 

	
 
        try:
 
            UserModel().add_extra_email(request.authuser.user_id, email)
 
            Session().commit()
kallithea/controllers/admin/repo_groups.py
 
@@ -145,153 +145,153 @@ class RepoGroupsController(BaseControlle
 
            "startIndex": 0,
 
            "sort": None,
 
            "dir": "asc",
 
            "records": repo_groups_data
 
        }
 

	
 
        return render('admin/repo_groups/repo_groups.html')
 

	
 
    def create(self):
 
        self.__load_defaults()
 

	
 
        # permissions for can create group based on parent_id are checked
 
        # here in the Form
 
        repo_group_form = RepoGroupForm(repo_groups=c.repo_groups)
 
        try:
 
            form_result = repo_group_form.to_python(dict(request.POST))
 
            gr = RepoGroupModel().create(
 
                group_name=form_result['group_name'],
 
                group_description=form_result['group_description'],
 
                parent=form_result['parent_group_id'],
 
                owner=request.authuser.user_id, # TODO: make editable
 
                copy_permissions=form_result['group_copy_permissions']
 
            )
 
            Session().commit()
 
            #TODO: in future action_logger(, '', '', '')
 
            # TODO: in future action_logger(, '', '', '')
 
        except formencode.Invalid as errors:
 
            return htmlfill.render(
 
                render('admin/repo_groups/repo_group_add.html'),
 
                defaults=errors.value,
 
                errors=errors.error_dict or {},
 
                prefix_error=False,
 
                encoding="UTF-8",
 
                force_defaults=False)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('Error occurred during creation of repository group %s') \
 
            h.flash(_('Error occurred during creation of repository group %s')
 
                    % request.POST.get('group_name'), category='error')
 
            parent_group_id = form_result['parent_group_id']
 
            #TODO: maybe we should get back to the main view, not the admin one
 
            # TODO: maybe we should get back to the main view, not the admin one
 
            raise HTTPFound(location=url('repos_groups', parent_group=parent_group_id))
 
        h.flash(_('Created repository group %s') % gr.group_name,
 
                category='success')
 
        raise HTTPFound(location=url('repos_group_home', group_name=gr.group_name))
 

	
 
    def new(self):
 
        if HasPermissionAny('hg.admin')('group create'):
 
            #we're global admin, we're ok and we can create TOP level groups
 
            # we're global admin, we're ok and we can create TOP level groups
 
            pass
 
        else:
 
            # we pass in parent group into creation form, thus we know
 
            # what would be the group, we can check perms here !
 
            group_id = safe_int(request.GET.get('parent_group'))
 
            group = RepoGroup.get(group_id) if group_id else None
 
            group_name = group.group_name if group else None
 
            if HasRepoGroupPermissionLevel('admin')(group_name, 'group create'):
 
                pass
 
            else:
 
                raise HTTPForbidden()
 

	
 
        self.__load_defaults()
 
        return render('admin/repo_groups/repo_group_add.html')
 

	
 
    @HasRepoGroupPermissionLevelDecorator('admin')
 
    def update(self, group_name):
 
        c.repo_group = RepoGroup.guess_instance(group_name)
 
        self.__load_defaults(extras=[c.repo_group.parent_group],
 
                             exclude=[c.repo_group])
 

	
 
        # TODO: kill allow_empty_group - it is only used for redundant form validation!
 
        if HasPermissionAny('hg.admin')('group edit'):
 
            #we're global admin, we're ok and we can create TOP level groups
 
            # we're global admin, we're ok and we can create TOP level groups
 
            allow_empty_group = True
 
        elif not c.repo_group.parent_group:
 
            allow_empty_group = True
 
        else:
 
            allow_empty_group = False
 
        repo_group_form = RepoGroupForm(
 
            edit=True,
 
            old_data=c.repo_group.get_dict(),
 
            repo_groups=c.repo_groups,
 
            can_create_in_root=allow_empty_group,
 
        )()
 
        try:
 
            form_result = repo_group_form.to_python(dict(request.POST))
 

	
 
            new_gr = RepoGroupModel().update(group_name, form_result)
 
            Session().commit()
 
            h.flash(_('Updated repository group %s') \
 
            h.flash(_('Updated repository group %s')
 
                    % form_result['group_name'], category='success')
 
            # we now have new name !
 
            group_name = new_gr.group_name
 
            #TODO: in future action_logger(, '', '', '')
 
            # TODO: in future action_logger(, '', '', '')
 
        except formencode.Invalid as errors:
 
            c.active = 'settings'
 
            return htmlfill.render(
 
                render('admin/repo_groups/repo_group_edit.html'),
 
                defaults=errors.value,
 
                errors=errors.error_dict or {},
 
                prefix_error=False,
 
                encoding="UTF-8",
 
                force_defaults=False)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('Error occurred during update of repository group %s') \
 
            h.flash(_('Error occurred during update of repository group %s')
 
                    % request.POST.get('group_name'), category='error')
 

	
 
        raise HTTPFound(location=url('edit_repo_group', group_name=group_name))
 

	
 
    @HasRepoGroupPermissionLevelDecorator('admin')
 
    def delete(self, group_name):
 
        gr = c.repo_group = RepoGroup.guess_instance(group_name)
 
        repos = gr.repositories.all()
 
        if repos:
 
            h.flash(_('This group contains %s repositories and cannot be '
 
                      'deleted') % len(repos), category='warning')
 
            raise HTTPFound(location=url('repos_groups'))
 

	
 
        children = gr.children.all()
 
        if children:
 
            h.flash(_('This group contains %s subgroups and cannot be deleted'
 
                      % (len(children))), category='warning')
 
            raise HTTPFound(location=url('repos_groups'))
 

	
 
        try:
 
            RepoGroupModel().delete(group_name)
 
            Session().commit()
 
            h.flash(_('Removed repository group %s') % group_name,
 
                    category='success')
 
            #TODO: in future action_logger(, '', '', '')
 
            # TODO: in future action_logger(, '', '', '')
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('Error occurred during deletion of repository group %s')
 
                    % group_name, category='error')
 

	
 
        if gr.parent_group:
 
            raise HTTPFound(location=url('repos_group_home', group_name=gr.parent_group.group_name))
 
        raise HTTPFound(location=url('repos_groups'))
 

	
 
    def show_by_name(self, group_name):
 
        """
 
        This is a proxy that does a lookup group_name -> id, and shows
 
        the group by id view instead
 
        """
 
        group_name = group_name.rstrip('/')
 
        id_ = RepoGroup.get_by_group_name(group_name)
 
        if id_:
 
            return self.show(group_name)
 
        raise HTTPNotFound
 

	
 
    @HasRepoGroupPermissionLevelDecorator('read')
 
    def show(self, group_name):
 
        c.active = 'settings'
 

	
 
@@ -348,49 +348,49 @@ class RepoGroupsController(BaseControlle
 
    @HasRepoGroupPermissionLevelDecorator('admin')
 
    def update_perms(self, group_name):
 
        """
 
        Update permissions for given repository group
 

	
 
        :param group_name:
 
        """
 

	
 
        c.repo_group = RepoGroup.guess_instance(group_name)
 
        valid_recursive_choices = ['none', 'repos', 'groups', 'all']
 
        form_result = RepoGroupPermsForm(valid_recursive_choices)().to_python(request.POST)
 
        if not request.authuser.is_admin:
 
            if self._revoke_perms_on_yourself(form_result):
 
                msg = _('Cannot revoke permission for yourself as admin')
 
                h.flash(msg, category='warning')
 
                raise HTTPFound(location=url('edit_repo_group_perms', group_name=group_name))
 
        recursive = form_result['recursive']
 
        # iterate over all members(if in recursive mode) of this groups and
 
        # set the permissions !
 
        # this can be potentially heavy operation
 
        RepoGroupModel()._update_permissions(c.repo_group,
 
                                             form_result['perms_new'],
 
                                             form_result['perms_updates'],
 
                                             recursive)
 
        #TODO: implement this
 
        # TODO: implement this
 
        #action_logger(request.authuser, 'admin_changed_repo_permissions',
 
        #              repo_name, request.ip_addr)
 
        Session().commit()
 
        h.flash(_('Repository group permissions updated'), category='success')
 
        raise HTTPFound(location=url('edit_repo_group_perms', group_name=group_name))
 

	
 
    @HasRepoGroupPermissionLevelDecorator('admin')
 
    def delete_perms(self, group_name):
 
        try:
 
            obj_type = request.POST.get('obj_type')
 
            obj_id = None
 
            if obj_type == 'user':
 
                obj_id = safe_int(request.POST.get('user_id'))
 
            elif obj_type == 'user_group':
 
                obj_id = safe_int(request.POST.get('user_group_id'))
 

	
 
            if not request.authuser.is_admin:
 
                if obj_type == 'user' and request.authuser.user_id == obj_id:
 
                    msg = _('Cannot revoke permission for yourself as admin')
 
                    h.flash(msg, category='warning')
 
                    raise Exception('revoke admin permission on self')
 
            recursive = request.POST.get('recursive', 'none')
 
            if obj_type == 'user':
 
                RepoGroupModel().delete_permission(repo_group=group_name,
kallithea/controllers/admin/repos.py
 
@@ -83,49 +83,49 @@ class ReposController(BaseRepoController
 

	
 
        c.repo_groups = AvailableRepoGroupChoices(top_perms, repo_group_perm_level, extras)
 

	
 
        c.landing_revs_choices, c.landing_revs = ScmModel().get_repo_landing_revs(repo)
 

	
 
    def __load_data(self):
 
        """
 
        Load defaults settings for edit, and update
 
        """
 
        c.repo_info = self._load_repo()
 
        self.__load_defaults(c.repo_info)
 

	
 
        defaults = RepoModel()._get_defaults(c.repo_name)
 
        defaults['clone_uri'] = c.repo_info.clone_uri_hidden # don't show password
 

	
 
        return defaults
 

	
 
    def index(self, format='html'):
 
        _list = Repository.query(sorted=True).all()
 

	
 
        c.repos_list = RepoList(_list, perm_level='admin')
 
        repos_data = RepoModel().get_repos_as_dict(repos_list=c.repos_list,
 
                                                   admin=True,
 
                                                   super_user_actions=True)
 
        #data used to render the grid
 
        # data used to render the grid
 
        c.data = repos_data
 

	
 
        return render('admin/repos/repos.html')
 

	
 
    @NotAnonymous()
 
    def create(self):
 
        self.__load_defaults()
 
        form_result = {}
 
        try:
 
            # CanWriteGroup validators checks permissions of this POST
 
            form_result = RepoForm(repo_groups=c.repo_groups,
 
                                   landing_revs=c.landing_revs_choices)() \
 
                            .to_python(dict(request.POST))
 

	
 
            # create is done sometimes async on celery, db transaction
 
            # management is handled there.
 
            task = RepoModel().create(form_result, request.authuser.user_id)
 
            task_id = task.task_id
 
        except formencode.Invalid as errors:
 
            log.info(errors)
 
            return htmlfill.render(
 
                render('admin/repos/repo_add.html'),
 
                defaults=errors.value,
 
                errors=errors.error_dict or {},
 
@@ -236,49 +236,49 @@ class ReposController(BaseRepoController
 
            form_result = _form.to_python(dict(request.POST))
 
            repo = repo_model.update(repo_name, **form_result)
 
            ScmModel().mark_for_invalidation(repo_name)
 
            h.flash(_('Repository %s updated successfully') % repo_name,
 
                    category='success')
 
            changed_name = repo.repo_name
 
            action_logger(request.authuser, 'admin_updated_repo',
 
                changed_name, request.ip_addr)
 
            Session().commit()
 
        except formencode.Invalid as errors:
 
            log.info(errors)
 
            defaults = self.__load_data()
 
            defaults.update(errors.value)
 
            c.users_array = repo_model.get_users_js()
 
            return htmlfill.render(
 
                render('admin/repos/repo_edit.html'),
 
                defaults=defaults,
 
                errors=errors.error_dict or {},
 
                prefix_error=False,
 
                encoding="UTF-8",
 
                force_defaults=False)
 

	
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('Error occurred during update of repository %s') \
 
            h.flash(_('Error occurred during update of repository %s')
 
                    % repo_name, category='error')
 
        raise HTTPFound(location=url('edit_repo', repo_name=changed_name))
 

	
 
    @HasRepoPermissionLevelDecorator('admin')
 
    def delete(self, repo_name):
 
        repo_model = RepoModel()
 
        repo = repo_model.get_by_repo_name(repo_name)
 
        if not repo:
 
            h.not_mapped_error(repo_name)
 
            raise HTTPFound(location=url('repos'))
 
        try:
 
            _forks = repo.forks.count()
 
            handle_forks = None
 
            if _forks and request.POST.get('forks'):
 
                do = request.POST['forks']
 
                if do == 'detach_forks':
 
                    handle_forks = 'detach'
 
                    h.flash(_('Detached %s forks') % _forks, category='success')
 
                elif do == 'delete_forks':
 
                    handle_forks = 'delete'
 
                    h.flash(_('Deleted %s forks') % _forks, category='success')
 
            repo_model.delete(repo, forks=handle_forks)
 
            action_logger(request.authuser, 'admin_deleted_repo',
 
                repo_name, request.ip_addr)
 
@@ -310,71 +310,71 @@ class ReposController(BaseRepoController
 
            render('admin/repos/repo_edit.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @HasRepoPermissionLevelDecorator('admin')
 
    def edit_permissions(self, repo_name):
 
        c.repo_info = self._load_repo()
 
        repo_model = RepoModel()
 
        c.users_array = repo_model.get_users_js()
 
        c.user_groups_array = repo_model.get_user_groups_js()
 
        c.active = 'permissions'
 
        defaults = RepoModel()._get_defaults(repo_name)
 

	
 
        return htmlfill.render(
 
            render('admin/repos/repo_edit.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    def edit_permissions_update(self, repo_name):
 
        form = RepoPermsForm()().to_python(request.POST)
 
        RepoModel()._update_permissions(repo_name, form['perms_new'],
 
                                        form['perms_updates'])
 
        #TODO: implement this
 
        # TODO: implement this
 
        #action_logger(request.authuser, 'admin_changed_repo_permissions',
 
        #              repo_name, request.ip_addr)
 
        Session().commit()
 
        h.flash(_('Repository permissions updated'), category='success')
 
        raise HTTPFound(location=url('edit_repo_perms', repo_name=repo_name))
 

	
 
    def edit_permissions_revoke(self, repo_name):
 
        try:
 
            obj_type = request.POST.get('obj_type')
 
            obj_id = None
 
            if obj_type == 'user':
 
                obj_id = safe_int(request.POST.get('user_id'))
 
            elif obj_type == 'user_group':
 
                obj_id = safe_int(request.POST.get('user_group_id'))
 

	
 
            if obj_type == 'user':
 
                RepoModel().revoke_user_permission(repo=repo_name, user=obj_id)
 
            elif obj_type == 'user_group':
 
                RepoModel().revoke_user_group_permission(
 
                    repo=repo_name, group_name=obj_id
 
                )
 
            #TODO: implement this
 
            # TODO: implement this
 
            #action_logger(request.authuser, 'admin_revoked_repo_permissions',
 
            #              repo_name, request.ip_addr)
 
            Session().commit()
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('An error occurred during revoking of permission'),
 
                    category='error')
 
            raise HTTPInternalServerError()
 

	
 
    @HasRepoPermissionLevelDecorator('admin')
 
    def edit_fields(self, repo_name):
 
        c.repo_info = self._load_repo()
 
        c.repo_fields = RepositoryField.query() \
 
            .filter(RepositoryField.repository == c.repo_info).all()
 
        c.active = 'fields'
 
        if request.POST:
 

	
 
            raise HTTPFound(location=url('repo_edit_fields'))
 
        return render('admin/repos/repo_edit.html')
 

	
 
    @HasRepoPermissionLevelDecorator('admin')
 
    def create_repo_field(self, repo_name):
 
        try:
 
            form_result = RepoFieldForm()().to_python(dict(request.POST))
 
@@ -435,49 +435,48 @@ class ReposController(BaseRepoController
 
            force_defaults=False)
 

	
 
    @HasRepoPermissionLevelDecorator('admin')
 
    def edit_advanced_journal(self, repo_name):
 
        """
 
        Sets this repository to be visible in public journal,
 
        in other words asking default user to follow this repo
 

	
 
        :param repo_name:
 
        """
 

	
 
        try:
 
            repo_id = Repository.get_by_repo_name(repo_name).repo_id
 
            user_id = User.get_default_user().user_id
 
            self.scm_model.toggle_following_repo(repo_id, user_id)
 
            h.flash(_('Updated repository visibility in public journal'),
 
                    category='success')
 
            Session().commit()
 
        except Exception:
 
            h.flash(_('An error occurred during setting this'
 
                      ' repository in public journal'),
 
                    category='error')
 
        raise HTTPFound(location=url('edit_repo_advanced', repo_name=repo_name))
 

	
 

	
 
    @HasRepoPermissionLevelDecorator('admin')
 
    def edit_advanced_fork(self, repo_name):
 
        """
 
        Mark given repository as a fork of another
 

	
 
        :param repo_name:
 
        """
 
        try:
 
            fork_id = request.POST.get('id_fork_of')
 
            repo = ScmModel().mark_as_fork(repo_name, fork_id,
 
                                           request.authuser.username)
 
            fork = repo.fork.repo_name if repo.fork else _('Nothing')
 
            Session().commit()
 
            h.flash(_('Marked repository %s as fork of %s') % (repo_name, fork),
 
                    category='success')
 
        except RepositoryError as e:
 
            log.error(traceback.format_exc())
 
            h.flash(str(e), category='error')
 
        except Exception as e:
 
            log.error(traceback.format_exc())
 
            h.flash(_('An error occurred during this operation'),
 
                    category='error')
 

	
 
        raise HTTPFound(location=url('edit_repo_advanced', repo_name=repo_name))
kallithea/controllers/admin/settings.py
 
@@ -81,49 +81,49 @@ class SettingsController(BaseController)
 
            settings[k] = v
 
        return settings
 

	
 
    @HasPermissionAnyDecorator('hg.admin')
 
    def settings_vcs(self):
 
        c.active = 'vcs'
 
        if request.POST:
 
            application_form = ApplicationUiSettingsForm()()
 
            try:
 
                form_result = application_form.to_python(dict(request.POST))
 
            except formencode.Invalid as errors:
 
                return htmlfill.render(
 
                     render('admin/settings/settings.html'),
 
                     defaults=errors.value,
 
                     errors=errors.error_dict or {},
 
                     prefix_error=False,
 
                     encoding="UTF-8",
 
                     force_defaults=False)
 

	
 
            try:
 
                if c.visual.allow_repo_location_change:
 
                    sett = Ui.get_by_key('paths', '/')
 
                    sett.ui_value = form_result['paths_root_path']
 

	
 
                #HOOKS
 
                # HOOKS
 
                sett = Ui.get_by_key('hooks', Ui.HOOK_UPDATE)
 
                sett.ui_active = form_result['hooks_changegroup_update']
 

	
 
                sett = Ui.get_by_key('hooks', Ui.HOOK_REPO_SIZE)
 
                sett.ui_active = form_result['hooks_changegroup_repo_size']
 

	
 
                sett = Ui.get_by_key('hooks', Ui.HOOK_PUSH)
 
                sett.ui_active = form_result['hooks_changegroup_push_logger']
 

	
 
                sett = Ui.get_by_key('hooks', Ui.HOOK_PULL)
 
                sett.ui_active = form_result['hooks_outgoing_pull_logger']
 

	
 
                ## EXTENSIONS
 
                sett = Ui.get_or_create('extensions', 'largefiles')
 
                sett.ui_active = form_result['extensions_largefiles']
 

	
 
                sett = Ui.get_or_create('extensions', 'hgsubversion')
 
                sett.ui_active = form_result['extensions_hgsubversion']
 
                if sett.ui_active:
 
                    try:
 
                        import hgsubversion  # pragma: no cover
 
                    except ImportError:
 
                        raise HgsubversionImportError
 

	
 
@@ -139,49 +139,49 @@ class SettingsController(BaseController)
 
                h.flash(_('Unable to activate hgsubversion support. '
 
                          'The "hgsubversion" library is missing'),
 
                        category='error')
 

	
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                h.flash(_('Error occurred while updating '
 
                          'application settings'), category='error')
 

	
 
        defaults = Setting.get_app_settings()
 
        defaults.update(self._get_hg_ui_settings())
 

	
 
        return htmlfill.render(
 
            render('admin/settings/settings.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False)
 

	
 
    @HasPermissionAnyDecorator('hg.admin')
 
    def settings_mapping(self):
 
        c.active = 'mapping'
 
        if request.POST:
 
            rm_obsolete = request.POST.get('destroy', False)
 
            install_git_hooks = request.POST.get('hooks', False)
 
            overwrite_git_hooks = request.POST.get('hooks_overwrite', False);
 
            overwrite_git_hooks = request.POST.get('hooks_overwrite', False)
 
            invalidate_cache = request.POST.get('invalidate', False)
 
            log.debug('rescanning repo location with destroy obsolete=%s, '
 
                      'install git hooks=%s and '
 
                      'overwrite git hooks=%s' % (rm_obsolete, install_git_hooks, overwrite_git_hooks))
 

	
 
            filesystem_repos = ScmModel().repo_scan()
 
            added, removed = repo2db_mapper(filesystem_repos, rm_obsolete,
 
                                            install_git_hooks=install_git_hooks,
 
                                            user=request.authuser.username,
 
                                            overwrite_git_hooks=overwrite_git_hooks)
 
            h.flash(h.literal(_('Repositories successfully rescanned. Added: %s. Removed: %s.') %
 
                (', '.join(h.link_to(safe_unicode(repo_name), h.url('summary_home', repo_name=repo_name))
 
                 for repo_name in added) or '-',
 
                 ', '.join(h.escape(safe_unicode(repo_name)) for repo_name in removed) or '-')),
 
                category='success')
 

	
 
            if invalidate_cache:
 
                log.debug('invalidating all repositories cache')
 
                i = 0
 
                for repo in Repository.query():
 
                    try:
 
                        ScmModel().mark_for_invalidation(repo.repo_name)
 
                        i += 1
 
                    except VCSError as e:
kallithea/controllers/admin/user_groups.py
 
@@ -134,49 +134,49 @@ class UserGroupsController(BaseControlle
 
        try:
 
            form_result = users_group_form.to_python(dict(request.POST))
 
            ug = UserGroupModel().create(name=form_result['users_group_name'],
 
                                         description=form_result['user_group_description'],
 
                                         owner=request.authuser.user_id,
 
                                         active=form_result['users_group_active'])
 

	
 
            gr = form_result['users_group_name']
 
            action_logger(request.authuser,
 
                          'admin_created_users_group:%s' % gr,
 
                          None, request.ip_addr)
 
            h.flash(h.literal(_('Created user group %s') % h.link_to(h.escape(gr), url('edit_users_group', id=ug.users_group_id))),
 
                category='success')
 
            Session().commit()
 
        except formencode.Invalid as errors:
 
            return htmlfill.render(
 
                render('admin/user_groups/user_group_add.html'),
 
                defaults=errors.value,
 
                errors=errors.error_dict or {},
 
                prefix_error=False,
 
                encoding="UTF-8",
 
                force_defaults=False)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('Error occurred during creation of user group %s') \
 
            h.flash(_('Error occurred during creation of user group %s')
 
                    % request.POST.get('users_group_name'), category='error')
 

	
 
        raise HTTPFound(location=url('users_groups'))
 

	
 
    @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
 
    def new(self, format='html'):
 
        return render('admin/user_groups/user_group_add.html')
 

	
 
    @HasUserGroupPermissionLevelDecorator('admin')
 
    def update(self, id):
 
        c.user_group = UserGroup.get_or_404(id)
 
        c.active = 'settings'
 
        self.__load_data(id)
 

	
 
        available_members = [safe_unicode(x[0]) for x in c.available_members]
 

	
 
        users_group_form = UserGroupForm(edit=True,
 
                                         old_data=c.user_group.get_dict(),
 
                                         available_members=available_members)()
 

	
 
        try:
 
            form_result = users_group_form.to_python(request.POST)
 
            UserGroupModel().update(c.user_group, form_result)
 
            gr = form_result['users_group_name']
 
@@ -184,49 +184,49 @@ class UserGroupsController(BaseControlle
 
                          'admin_updated_users_group:%s' % gr,
 
                          None, request.ip_addr)
 
            h.flash(_('Updated user group %s') % gr, category='success')
 
            Session().commit()
 
        except formencode.Invalid as errors:
 
            ug_model = UserGroupModel()
 
            defaults = errors.value
 
            e = errors.error_dict or {}
 
            defaults.update({
 
                'create_repo_perm': ug_model.has_perm(id,
 
                                                      'hg.create.repository'),
 
                'fork_repo_perm': ug_model.has_perm(id,
 
                                                    'hg.fork.repository'),
 
            })
 

	
 
            return htmlfill.render(
 
                render('admin/user_groups/user_group_edit.html'),
 
                defaults=defaults,
 
                errors=e,
 
                prefix_error=False,
 
                encoding="UTF-8",
 
                force_defaults=False)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('Error occurred during update of user group %s') \
 
            h.flash(_('Error occurred during update of user group %s')
 
                    % request.POST.get('users_group_name'), category='error')
 

	
 
        raise HTTPFound(location=url('edit_users_group', id=id))
 

	
 
    @HasUserGroupPermissionLevelDecorator('admin')
 
    def delete(self, id):
 
        usr_gr = UserGroup.get_or_404(id)
 
        try:
 
            UserGroupModel().delete(usr_gr)
 
            Session().commit()
 
            h.flash(_('Successfully deleted user group'), category='success')
 
        except UserGroupsAssignedException as e:
 
            h.flash(e, category='error')
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('An error occurred during deletion of user group'),
 
                    category='error')
 
        raise HTTPFound(location=url('users_groups'))
 

	
 
    @HasUserGroupPermissionLevelDecorator('admin')
 
    def edit(self, id, format='html'):
 
        c.user_group = UserGroup.get_or_404(id)
 
        c.active = 'settings'
 
        self.__load_data(id)
 
@@ -262,49 +262,49 @@ class UserGroupsController(BaseControlle
 
        return htmlfill.render(
 
            render('admin/user_groups/user_group_edit.html'),
 
            defaults=defaults,
 
            encoding="UTF-8",
 
            force_defaults=False
 
        )
 

	
 
    @HasUserGroupPermissionLevelDecorator('admin')
 
    def update_perms(self, id):
 
        """
 
        grant permission for given usergroup
 

	
 
        :param id:
 
        """
 
        user_group = UserGroup.get_or_404(id)
 
        form = UserGroupPermsForm()().to_python(request.POST)
 

	
 
        # set the permissions !
 
        try:
 
            UserGroupModel()._update_permissions(user_group, form['perms_new'],
 
                                                 form['perms_updates'])
 
        except RepoGroupAssignmentError:
 
            h.flash(_('Target group cannot be the same'), category='error')
 
            raise HTTPFound(location=url('edit_user_group_perms', id=id))
 
        #TODO: implement this
 
        # TODO: implement this
 
        #action_logger(request.authuser, 'admin_changed_repo_permissions',
 
        #              repo_name, request.ip_addr)
 
        Session().commit()
 
        h.flash(_('User group permissions updated'), category='success')
 
        raise HTTPFound(location=url('edit_user_group_perms', id=id))
 

	
 
    @HasUserGroupPermissionLevelDecorator('admin')
 
    def delete_perms(self, id):
 
        try:
 
            obj_type = request.POST.get('obj_type')
 
            obj_id = None
 
            if obj_type == 'user':
 
                obj_id = safe_int(request.POST.get('user_id'))
 
            elif obj_type == 'user_group':
 
                obj_id = safe_int(request.POST.get('user_group_id'))
 

	
 
            if not request.authuser.is_admin:
 
                if obj_type == 'user' and request.authuser.user_id == obj_id:
 
                    msg = _('Cannot revoke permission for yourself as admin')
 
                    h.flash(msg, category='warning')
 
                    raise Exception('revoke admin permission on self')
 
            if obj_type == 'user':
 
                UserGroupModel().revoke_user_permission(user_group=id,
 
                                                        user=obj_id)
 
@@ -394,34 +394,33 @@ class UserGroupsController(BaseControlle
 
            else:
 
                usergroup_model.grant_perm(id, 'hg.usergroup.create.false')
 
            if form_result['fork_repo_perm']:
 
                usergroup_model.grant_perm(id, 'hg.fork.repository')
 
            else:
 
                usergroup_model.grant_perm(id, 'hg.fork.none')
 

	
 
            h.flash(_("Updated permissions"), category='success')
 
            Session().commit()
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('An error occurred during permissions saving'),
 
                    category='error')
 

	
 
        raise HTTPFound(location=url('edit_user_group_default_perms', id=id))
 

	
 
    @HasUserGroupPermissionLevelDecorator('admin')
 
    def edit_advanced(self, id):
 
        c.user_group = UserGroup.get_or_404(id)
 
        c.active = 'advanced'
 
        c.group_members_obj = sorted((x.user for x in c.user_group.members),
 
                                     key=lambda u: u.username.lower())
 
        return render('admin/user_groups/user_group_edit.html')
 

	
 

	
 
    @HasUserGroupPermissionLevelDecorator('admin')
 
    def edit_members(self, id):
 
        c.user_group = UserGroup.get_or_404(id)
 
        c.active = 'members'
 
        c.group_members_obj = sorted((x.user for x in c.user_group.members),
 
                                     key=lambda u: u.username.lower())
 

	
 
        c.group_members = [(x.user_id, x.username) for x in c.group_members_obj]
 
        return render('admin/user_groups/user_group_edit.html')
kallithea/controllers/admin/users.py
 
@@ -116,92 +116,92 @@ class UsersController(BaseController):
 
        c.default_extern_type = User.DEFAULT_AUTH_TYPE
 
        c.default_extern_name = ''
 
        user_model = UserModel()
 
        user_form = UserForm()()
 
        try:
 
            form_result = user_form.to_python(dict(request.POST))
 
            user = user_model.create(form_result)
 
            action_logger(request.authuser, 'admin_created_user:%s' % user.username,
 
                          None, request.ip_addr)
 
            h.flash(_('Created user %s') % user.username,
 
                    category='success')
 
            Session().commit()
 
        except formencode.Invalid as errors:
 
            return htmlfill.render(
 
                render('admin/users/user_add.html'),
 
                defaults=errors.value,
 
                errors=errors.error_dict or {},
 
                prefix_error=False,
 
                encoding="UTF-8",
 
                force_defaults=False)
 
        except UserCreationError as e:
 
            h.flash(e, 'error')
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('Error occurred during creation of user %s') \
 
            h.flash(_('Error occurred during creation of user %s')
 
                    % request.POST.get('username'), category='error')
 
        raise HTTPFound(location=url('edit_user', id=user.user_id))
 

	
 
    def new(self, format='html'):
 
        c.default_extern_type = User.DEFAULT_AUTH_TYPE
 
        c.default_extern_name = ''
 
        return render('admin/users/user_add.html')
 

	
 
    def update(self, id):
 
        user_model = UserModel()
 
        user = user_model.get(id)
 
        _form = UserForm(edit=True, old_data={'user_id': id,
 
                                              'email': user.email})()
 
        form_result = {}
 
        try:
 
            form_result = _form.to_python(dict(request.POST))
 
            skip_attrs = ['extern_type', 'extern_name',
 
                         ] + auth_modules.get_managed_fields(user)
 

	
 
            user_model.update(id, form_result, skip_attrs=skip_attrs)
 
            usr = form_result['username']
 
            action_logger(request.authuser, 'admin_updated_user:%s' % usr,
 
                          None, request.ip_addr)
 
            h.flash(_('User updated successfully'), category='success')
 
            Session().commit()
 
        except formencode.Invalid as errors:
 
            defaults = errors.value
 
            e = errors.error_dict or {}
 
            defaults.update({
 
                'create_repo_perm': user_model.has_perm(id,
 
                                                        'hg.create.repository'),
 
                'fork_repo_perm': user_model.has_perm(id, 'hg.fork.repository'),
 
            })
 
            return htmlfill.render(
 
                self._render_edit_profile(user),
 
                defaults=defaults,
 
                errors=e,
 
                prefix_error=False,
 
                encoding="UTF-8",
 
                force_defaults=False)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('Error occurred during update of user %s') \
 
            h.flash(_('Error occurred during update of user %s')
 
                    % form_result.get('username'), category='error')
 
        raise HTTPFound(location=url('edit_user', id=id))
 

	
 
    def delete(self, id):
 
        usr = User.get_or_404(id)
 
        try:
 
            UserModel().delete(usr)
 
            Session().commit()
 
            h.flash(_('Successfully deleted user'), category='success')
 
        except (UserOwnsReposException, DefaultUserException) as e:
 
            h.flash(e, category='warning')
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            h.flash(_('An error occurred during deletion of user'),
 
                    category='error')
 
        raise HTTPFound(location=url('users'))
 

	
 
    def _get_user_or_raise_if_default(self, id):
 
        try:
 
            return User.get_or_404(id, allow_default=False)
 
        except DefaultUserException:
 
            h.flash(_("The default user cannot be edited"), category='warning')
 
            raise HTTPNotFound
 

	
kallithea/controllers/changelog.py
 
@@ -93,49 +93,49 @@ class ChangelogController(BaseRepoContro
 
        # min size must be 1
 
        c.size = max(c.size, 1)
 
        p = safe_int(request.GET.get('page'), 1)
 
        branch_name = request.GET.get('branch', None)
 
        if (branch_name and
 
            branch_name not in c.db_repo_scm_instance.branches and
 
            branch_name not in c.db_repo_scm_instance.closed_branches and
 
            not revision):
 
            raise HTTPFound(location=url('changelog_file_home', repo_name=c.repo_name,
 
                                    revision=branch_name, f_path=f_path or ''))
 

	
 
        if revision == 'tip':
 
            revision = None
 

	
 
        c.changelog_for_path = f_path
 
        try:
 

	
 
            if f_path:
 
                log.debug('generating changelog for path %s', f_path)
 
                # get the history for the file !
 
                tip_cs = c.db_repo_scm_instance.get_changeset()
 
                try:
 
                    collection = tip_cs.get_file_history(f_path)
 
                except (NodeDoesNotExistError, ChangesetError):
 
                    #this node is not present at tip !
 
                    # this node is not present at tip !
 
                    try:
 
                        cs = self.__get_cs(revision, repo_name)
 
                        collection = cs.get_file_history(f_path)
 
                    except RepositoryError as e:
 
                        h.flash(safe_str(e), category='warning')
 
                        raise HTTPFound(location=h.url('changelog_home', repo_name=repo_name))
 
                collection = list(reversed(collection))
 
            else:
 
                collection = c.db_repo_scm_instance.get_changesets(start=0, end=revision,
 
                                                        branch_name=branch_name)
 
            c.total_cs = len(collection)
 

	
 
            c.cs_pagination = RepoPage(collection, page=p, item_count=c.total_cs,
 
                                    items_per_page=c.size, branch=branch_name,)
 

	
 
            page_revisions = [x.raw_id for x in c.cs_pagination]
 
            c.cs_comments = c.db_repo.get_comments(page_revisions)
 
            c.cs_statuses = c.db_repo.statuses(page_revisions)
 
        except EmptyRepositoryError as e:
 
            h.flash(safe_str(e), category='warning')
 
            raise HTTPFound(location=url('summary_home', repo_name=c.repo_name))
 
        except (RepositoryError, ChangesetDoesNotExistError, Exception) as e:
 
            log.error(traceback.format_exc())
 
            h.flash(safe_str(e), category='error')
kallithea/controllers/changeset.py
 
@@ -94,49 +94,49 @@ def _ignorews_url(GET, fileid=None):
 
        ctx_val = ln_ctx
 
    # per file options
 
    else:
 
        if ig_ws is None:
 
            params[fileid] += ['WS:1']
 
            lbl = _('Ignore whitespace')
 

	
 
        ctx_key = fileid
 
        ctx_val = 'C:%s' % ln_ctx
 
    # if we have passed in ln_ctx pass it along to our params
 
    if ln_ctx:
 
        params[ctx_key] += [ctx_val]
 

	
 
    params['anchor'] = fileid
 
    icon = h.literal('<i class="icon-strike"></i>')
 
    return h.link_to(icon, h.url.current(**params), title=lbl, **{'data-toggle': 'tooltip'})
 

	
 

	
 
def get_line_ctx(fid, GET):
 
    ln_ctx_global = GET.get('context')
 
    if fid:
 
        ln_ctx = filter(lambda k: k.startswith('C'), GET.getall(fid))
 
    else:
 
        _ln_ctx = filter(lambda k: k.startswith('C'), GET)
 
        ln_ctx = GET.get(_ln_ctx[0]) if _ln_ctx  else ln_ctx_global
 
        ln_ctx = GET.get(_ln_ctx[0]) if _ln_ctx else ln_ctx_global
 
        if ln_ctx:
 
            ln_ctx = [ln_ctx]
 

	
 
    if ln_ctx:
 
        retval = ln_ctx[0].split(':')[-1]
 
    else:
 
        retval = ln_ctx_global
 

	
 
    try:
 
        return int(retval)
 
    except Exception:
 
        return 3
 

	
 

	
 
def _context_url(GET, fileid=None):
 
    """
 
    Generates url for context lines
 

	
 
    :param fileid:
 
    """
 

	
 
    fileid = str(fileid) if fileid else None
 
    ig_ws = get_ignore_ws(fileid, GET)
 
    ln_ctx = (get_line_ctx(fileid, GET) or 3) * 2
 
@@ -186,49 +186,49 @@ def create_comment(text, status, f_path,
 
        status_change=ChangesetStatus.get_status_lbl(status) if status else None,
 
        closing_pr=closing_pr,
 
    )
 

	
 
    return comment
 

	
 

	
 
class ChangesetController(BaseRepoController):
 

	
 
    def _before(self, *args, **kwargs):
 
        super(ChangesetController, self)._before(*args, **kwargs)
 
        c.affected_files_cut_off = 60
 

	
 
    def __load_data(self):
 
        repo_model = RepoModel()
 
        c.users_array = repo_model.get_users_js()
 
        c.user_groups_array = repo_model.get_user_groups_js()
 

	
 
    def _index(self, revision, method):
 
        c.pull_request = None
 
        c.anchor_url = anchor_url
 
        c.ignorews_url = _ignorews_url
 
        c.context_url = _context_url
 
        c.fulldiff = fulldiff = request.GET.get('fulldiff')
 
        #get ranges of revisions if preset
 
        # get ranges of revisions if preset
 
        rev_range = revision.split('...')[:2]
 
        enable_comments = True
 
        c.cs_repo = c.db_repo
 
        try:
 
            if len(rev_range) == 2:
 
                enable_comments = False
 
                rev_start = rev_range[0]
 
                rev_end = rev_range[1]
 
                rev_ranges = c.db_repo_scm_instance.get_changesets(start=rev_start,
 
                                                             end=rev_end)
 
            else:
 
                rev_ranges = [c.db_repo_scm_instance.get_changeset(revision)]
 

	
 
            c.cs_ranges = list(rev_ranges)
 
            if not c.cs_ranges:
 
                raise RepositoryError('Changeset range returned empty result')
 

	
 
        except (ChangesetDoesNotExistError, EmptyRepositoryError):
 
            log.debug(traceback.format_exc())
 
            msg = _('Such revision does not exist for this repository')
 
            h.flash(msg, category='error')
 
            raise HTTPNotFound()
 

	
 
        c.changes = OrderedDict()
 
@@ -279,49 +279,49 @@ class ChangesetController(BaseRepoContro
 
                                                 format='gitdiff',
 
                                                 diff_limit=diff_limit)
 
            file_diff_data = []
 
            if method == 'show':
 
                _parsed = diff_processor.prepare()
 
                c.limited_diff = False
 
                if isinstance(_parsed, LimitedDiffContainer):
 
                    c.limited_diff = True
 
                for f in _parsed:
 
                    st = f['stats']
 
                    c.lines_added += st['added']
 
                    c.lines_deleted += st['deleted']
 
                    filename = f['filename']
 
                    fid = h.FID(changeset.raw_id, filename)
 
                    url_fid = h.FID('', filename)
 
                    diff = diff_processor.as_html(enable_comments=enable_comments,
 
                                                  parsed_lines=[f])
 
                    file_diff_data.append((fid, url_fid, f['operation'], f['old_filename'], filename, diff, st))
 
            else:
 
                # downloads/raw we only need RAW diff nothing else
 
                diff = diff_processor.as_raw()
 
                file_diff_data.append(('', None, None, None, diff, None))
 
            c.changes[changeset.raw_id] = (cs1, cs2, file_diff_data)
 

	
 
        #sort comments in creation order
 
        # sort comments in creation order
 
        c.comments = [com for com_id, com in sorted(comments.items())]
 

	
 
        # count inline comments
 
        for __, lines in c.inline_comments:
 
            for comments in lines.values():
 
                c.inline_cnt += len(comments)
 

	
 
        if len(c.cs_ranges) == 1:
 
            c.changeset = c.cs_ranges[0]
 
            c.parent_tmpl = ''.join(['# Parent  %s\n' % x.raw_id
 
                                     for x in c.changeset.parents])
 
        if method == 'download':
 
            response.content_type = 'text/plain'
 
            response.content_disposition = 'attachment; filename=%s.diff' \
 
                                            % revision[:12]
 
            return diff
 
        elif method == 'patch':
 
            response.content_type = 'text/plain'
 
            c.diff = safe_unicode(diff)
 
            return render('changeset/patch_changeset.html')
 
        elif method == 'raw':
 
            response.content_type = 'text/plain'
 
            return diff
 
        elif method == 'show':
kallithea/controllers/compare.py
 
@@ -73,57 +73,57 @@ class CompareController(BaseRepoControll
 
            msg = _('Cannot compare repositories of different types')
 
            h.flash(msg, category='error')
 
            raise HTTPFound(location=url('compare_home', repo_name=c.a_repo.repo_name))
 

	
 
    @staticmethod
 
    def _get_changesets(alias, org_repo, org_rev, other_repo, other_rev):
 
        """
 
        Returns lists of changesets that can be merged from org_repo@org_rev
 
        to other_repo@other_rev
 
        ... and the other way
 
        ... and the ancestors that would be used for merge
 

	
 
        :param org_repo: repo object, that is most likely the original repo we forked from
 
        :param org_rev: the revision we want our compare to be made
 
        :param other_repo: repo object, most likely the fork of org_repo. It has
 
            all changesets that we need to obtain
 
        :param other_rev: revision we want out compare to be made on other_repo
 
        """
 
        ancestors = None
 
        if org_rev == other_rev:
 
            org_changesets = []
 
            other_changesets = []
 

	
 
        elif alias == 'hg':
 
            #case two independent repos
 
            # case two independent repos
 
            if org_repo != other_repo:
 
                hgrepo = unionrepo.unionrepository(other_repo.baseui,
 
                                                   other_repo.path,
 
                                                   org_repo.path)
 
                # all ancestors of other_rev will be in other_repo and
 
                # rev numbers from hgrepo can be used in other_repo - org_rev ancestors cannot
 

	
 
            #no remote compare do it on the same repository
 
            # no remote compare do it on the same repository
 
            else:
 
                hgrepo = other_repo._repo
 

	
 
            ancestors = [hgrepo[ancestor].hex() for ancestor in
 
                         hgrepo.revs("id(%s) & ::id(%s)", other_rev, org_rev)]
 
            if ancestors:
 
                log.debug("shortcut found: %s is already an ancestor of %s", other_rev, org_rev)
 
            else:
 
                log.debug("no shortcut found: %s is not an ancestor of %s", other_rev, org_rev)
 
                ancestors = [hgrepo[ancestor].hex() for ancestor in
 
                             hgrepo.revs("heads(::id(%s) & ::id(%s))", org_rev, other_rev)] # FIXME: expensive!
 

	
 
            other_revs = hgrepo.revs("ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
 
                                     other_rev, org_rev, org_rev)
 
            other_changesets = [other_repo.get_changeset(rev) for rev in other_revs]
 
            org_revs = hgrepo.revs("ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)",
 
                                   org_rev, other_rev, other_rev)
 
            org_changesets = [org_repo.get_changeset(hgrepo[rev].hex()) for rev in org_revs]
 

	
 
        elif alias == 'git':
 
            if org_repo != other_repo:
 
                from dulwich.repo import Repo
 
                from dulwich.client import SubprocessGitClient
 

	
kallithea/controllers/feed.py
 
@@ -62,49 +62,49 @@ class FeedController(BaseRepoController)
 
    def __changes(self, cs):
 
        changes = []
 
        rss_cut_off_limit = safe_int(CONFIG.get('rss_cut_off_limit', 32 * 1024))
 
        diff_processor = DiffProcessor(cs.diff(),
 
                                       diff_limit=rss_cut_off_limit)
 
        _parsed = diff_processor.prepare(inline_diff=False)
 
        limited_diff = False
 
        if isinstance(_parsed, LimitedDiffContainer):
 
            limited_diff = True
 

	
 
        for st in _parsed:
 
            st.update({'added': st['stats']['added'],
 
                       'removed': st['stats']['deleted']})
 
            changes.append('\n %(operation)s %(filename)s '
 
                           '(%(added)s lines added, %(removed)s lines removed)'
 
                            % st)
 
        if limited_diff:
 
            changes = changes + ['\n ' +
 
                                 _('Changeset was too big and was cut off...')]
 
        return diff_processor, changes
 

	
 
    def __get_desc(self, cs):
 
        desc_msg = [(_('%s committed on %s')
 
                     % (h.person(cs.author), h.fmt_date(cs.date))) + '<br/>']
 
        #branches, tags, bookmarks
 
        # branches, tags, bookmarks
 
        if cs.branch:
 
            desc_msg.append('branch: %s<br/>' % cs.branch)
 
        for book in cs.bookmarks:
 
            desc_msg.append('bookmark: %s<br/>' % book)
 
        for tag in cs.tags:
 
            desc_msg.append('tag: %s<br/>' % tag)
 
        diff_processor, changes = self.__changes(cs)
 
        # rev link
 
        _url = h.canonical_url('changeset_home', repo_name=c.db_repo.repo_name,
 
                   revision=cs.raw_id)
 
        desc_msg.append('changeset: <a href="%s">%s</a>' % (_url, cs.raw_id[:8]))
 

	
 
        desc_msg.append('<pre>')
 
        desc_msg.append(h.urlify_text(cs.message))
 
        desc_msg.append('\n')
 
        desc_msg.extend(changes)
 
        if str2bool(CONFIG.get('rss_include_diff', False)):
 
            desc_msg.append('\n\n')
 
            desc_msg.append(diff_processor.as_raw())
 
        desc_msg.append('</pre>')
 
        return map(safe_unicode, desc_msg)
 

	
 
    def atom(self, repo_name):
 
        """Produce an atom-1.0 feed via feedgenerator module"""
kallithea/controllers/files.py
 
@@ -145,49 +145,49 @@ class FilesController(BaseRepoController
 
            prev_rev = c.db_repo_scm_instance.get_changeset(cur_rev).prev(c.branch)
 
            c.url_prev = url('files_home', repo_name=c.repo_name,
 
                         revision=prev_rev.raw_id, f_path=f_path)
 
            if c.branch:
 
                c.url_prev += '?branch=%s' % c.branch
 
        except (ChangesetDoesNotExistError, VCSError):
 
            c.url_prev = '#'
 

	
 
        # next link
 
        try:
 
            next_rev = c.db_repo_scm_instance.get_changeset(cur_rev).next(c.branch)
 
            c.url_next = url('files_home', repo_name=c.repo_name,
 
                     revision=next_rev.raw_id, f_path=f_path)
 
            if c.branch:
 
                c.url_next += '?branch=%s' % c.branch
 
        except (ChangesetDoesNotExistError, VCSError):
 
            c.url_next = '#'
 

	
 
        # files or dirs
 
        try:
 
            c.file = c.changeset.get_node(f_path)
 

	
 
            if c.file.is_file():
 
                c.load_full_history = False
 
                #determine if we're on branch head
 
                # determine if we're on branch head
 
                _branches = c.db_repo_scm_instance.branches
 
                c.on_branch_head = revision in _branches.keys() + _branches.values()
 
                _hist = []
 
                c.file_history = []
 
                if c.load_full_history:
 
                    c.file_history, _hist = self._get_node_history(c.changeset, f_path)
 

	
 
                c.authors = []
 
                for a in set([x.author for x in _hist]):
 
                    c.authors.append((h.email(a), h.person(a)))
 
            else:
 
                c.authors = c.file_history = []
 
        except RepositoryError as e:
 
            h.flash(safe_str(e), category='error')
 
            raise HTTPNotFound()
 

	
 
        if request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            return render('files/files_ypjax.html')
 

	
 
        # TODO: tags and bookmarks?
 
        c.revision_options = [(c.changeset.raw_id,
 
                              _('%s at %s') % (c.changeset.branch, h.short_id(c.changeset.raw_id)))] + \
 
            [(n, b) for b, n in c.db_repo_scm_instance.branches.items()]
 
        if c.db_repo_scm_instance.closed_branches:
 
@@ -439,49 +439,49 @@ class FilesController(BaseRepoController
 
            unix_mode = 0
 
            content = convert_line_endings(r_post.get('content', ''), unix_mode)
 

	
 
            message = r_post.get('message') or c.default_message
 
            filename = r_post.get('filename')
 
            location = r_post.get('location', '')
 
            file_obj = r_post.get('upload_file', None)
 

	
 
            if file_obj is not None and hasattr(file_obj, 'filename'):
 
                filename = file_obj.filename
 
                content = file_obj.file
 

	
 
                if hasattr(content, 'file'):
 
                    # non posix systems store real file under file attr
 
                    content = content.file
 

	
 
            if not content:
 
                h.flash(_('No content'), category='warning')
 
                raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
 
                                    revision='tip'))
 
            if not filename:
 
                h.flash(_('No filename'), category='warning')
 
                raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
 
                                    revision='tip'))
 
            #strip all crap out of file, just leave the basename
 
            # strip all crap out of file, just leave the basename
 
            filename = os.path.basename(filename)
 
            node_path = posixpath.join(location, filename)
 
            author = request.authuser.full_contact
 

	
 
            try:
 
                nodes = {
 
                    node_path: {
 
                        'content': content
 
                    }
 
                }
 
                self.scm_model.create_nodes(
 
                    user=request.authuser.user_id, repo=c.db_repo,
 
                    message=message,
 
                    nodes=nodes,
 
                    parent_cs=c.cs,
 
                    author=author,
 
                )
 

	
 
                h.flash(_('Successfully committed to %s') % node_path,
 
                        category='success')
 
            except NonRelativePathError as e:
 
                h.flash(_('Location must be relative path and must not '
 
                          'contain .. in path'), category='warning')
 
                raise HTTPFound(location=url('changeset_home', repo_name=c.repo_name,
 
@@ -578,50 +578,50 @@ class FilesController(BaseRepoController
 
                      action='user_downloaded_archive:%s' % (archive_name),
 
                      repo=repo_name, ipaddr=request.ip_addr, commit=True)
 

	
 
        response.content_disposition = str('attachment; filename=%s' % (archive_name))
 
        response.content_type = str(content_type)
 
        return get_chunked_archive(archive_path)
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionLevelDecorator('read')
 
    def diff(self, repo_name, f_path):
 
        ignore_whitespace = request.GET.get('ignorews') == '1'
 
        line_context = safe_int(request.GET.get('context'), 3)
 
        diff2 = request.GET.get('diff2', '')
 
        diff1 = request.GET.get('diff1', '') or diff2
 
        c.action = request.GET.get('diff')
 
        c.no_changes = diff1 == diff2
 
        c.f_path = f_path
 
        c.big_diff = False
 
        c.anchor_url = anchor_url
 
        c.ignorews_url = _ignorews_url
 
        c.context_url = _context_url
 
        c.changes = OrderedDict()
 
        c.changes[diff2] = []
 

	
 
        #special case if we want a show rev only, it's impl here
 
        #to reduce JS and callbacks
 
        # special case if we want a show rev only, it's impl here
 
        # to reduce JS and callbacks
 

	
 
        if request.GET.get('show_rev'):
 
            if str2bool(request.GET.get('annotate', 'False')):
 
                _url = url('files_annotate_home', repo_name=c.repo_name,
 
                           revision=diff1, f_path=c.f_path)
 
            else:
 
                _url = url('files_home', repo_name=c.repo_name,
 
                           revision=diff1, f_path=c.f_path)
 

	
 
            raise HTTPFound(location=_url)
 
        try:
 
            if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]:
 
                c.changeset_1 = c.db_repo_scm_instance.get_changeset(diff1)
 
                try:
 
                    node1 = c.changeset_1.get_node(f_path)
 
                    if node1.is_dir():
 
                        raise NodeError('%s path is a %s not a file'
 
                                        % (node1, type(node1)))
 
                except NodeDoesNotExistError:
 
                    c.changeset_1 = EmptyChangeset(cs=diff1,
 
                                                   revision=c.changeset_1.revision,
 
                                                   repo=c.db_repo_scm_instance)
 
                    node1 = FileNode(f_path, '', changeset=c.changeset_1)
 
            else:
 
@@ -727,49 +727,49 @@ class FilesController(BaseRepoController
 
            h.flash(msg, category='error')
 
            raise HTTPNotFound()
 
        c.node1 = node1
 
        c.node2 = node2
 
        c.cs1 = c.changeset_1
 
        c.cs2 = c.changeset_2
 

	
 
        return render('files/diff_2way.html')
 

	
 
    def _get_node_history(self, cs, f_path, changesets=None):
 
        """
 
        get changesets history for given node
 

	
 
        :param cs: changeset to calculate history
 
        :param f_path: path for node to calculate history for
 
        :param changesets: if passed don't calculate history and take
 
            changesets defined in this list
 
        """
 
        # calculate history based on tip
 
        tip_cs = c.db_repo_scm_instance.get_changeset()
 
        if changesets is None:
 
            try:
 
                changesets = tip_cs.get_file_history(f_path)
 
            except (NodeDoesNotExistError, ChangesetError):
 
                #this node is not present at tip !
 
                # this node is not present at tip !
 
                changesets = cs.get_file_history(f_path)
 
        hist_l = []
 

	
 
        changesets_group = ([], _("Changesets"))
 
        branches_group = ([], _("Branches"))
 
        tags_group = ([], _("Tags"))
 
        for chs in changesets:
 
            #_branch = '(%s)' % chs.branch if (cs.repository.alias == 'hg') else ''
 
            _branch = chs.branch
 
            n_desc = '%s (%s)' % (h.show_id(chs), _branch)
 
            changesets_group[0].append((chs.raw_id, n_desc,))
 
        hist_l.append(changesets_group)
 

	
 
        for name, chs in c.db_repo_scm_instance.branches.items():
 
            branches_group[0].append((chs, name),)
 
        hist_l.append(branches_group)
 

	
 
        for name, chs in c.db_repo_scm_instance.tags.items():
 
            tags_group[0].append((chs, name),)
 
        hist_l.append(tags_group)
 

	
 
        return hist_l, changesets
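_get_node_history above returns the history selector data as a list of (options, label) groups: changesets first, then branches, then tags, each option being a (value, display text) pair. A small self-contained sketch of the same grouping shape, assuming plain tuples and dicts instead of VCS objects:

    def build_history_groups(changesets, branches, tags):
        # each group is ([(value, label), ...], group_title), matching the
        # shape _get_node_history builds for the select box
        groups = [([(raw_id, desc) for raw_id, desc in changesets], "Changesets"),
                  ([(rev, name) for name, rev in branches.items()], "Branches"),
                  ([(rev, name) for name, rev in tags.items()], "Tags")]
        return groups

    print(build_history_groups([("deadbeef", "deadbeef (default)")],
                               {"default": "deadbeef"}, {"v1.0": "deadbeef"}))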
 

	
 
    @LoginRequired()
kallithea/controllers/home.py
 
@@ -36,57 +36,57 @@ from sqlalchemy.sql.expression import fu
 
from kallithea.lib.utils import conditional_cache
 
from kallithea.lib.auth import LoginRequired, HasRepoPermissionLevelDecorator
 
from kallithea.lib.base import BaseController, render, jsonify
 
from kallithea.model.db import Repository, RepoGroup
 
from kallithea.model.repo import RepoModel
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class HomeController(BaseController):
 

	
 
    def about(self):
 
        return render('/about.html')
 

	
 
    @LoginRequired()
 
    def index(self):
 
        c.groups = self.scm_model.get_repo_groups()
 
        c.group = None
 

	
 
        repos_list = Repository.query(sorted=True).filter_by(group=None).all()
 

	
 
        repos_data = RepoModel().get_repos_as_dict(repos_list=repos_list,
 
                                                   admin=False, short_name=True)
 
        #data used to render the grid
 
        # data used to render the grid
 
        c.data = repos_data
 

	
 
        return render('/index.html')
 

	
 
    @LoginRequired()
 
    @jsonify
 
    def repo_switcher_data(self):
 
        #wrapper for conditional cache
 
        # wrapper for conditional cache
 
        def _c():
 
            log.debug('generating switcher repo/groups list')
 
            all_repos = Repository.query(sorted=True).all()
 
            repo_iter = self.scm_model.get_repos(all_repos)
 
            all_groups = RepoGroup.query(sorted=True).all()
 
            repo_groups_iter = self.scm_model.get_repo_groups(all_groups)
 

	
 
            res = [{
 
                    'text': _('Groups'),
 
                    'children': [
 
                       {'id': obj.group_name,
 
                        'text': obj.group_name,
 
                        'type': 'group',
 
                        'obj': {}}
 
                       for obj in repo_groups_iter
 
                    ],
 
                   },
 
                   {
 
                    'text': _('Repositories'),
 
                    'children': [
 
                       {'id': obj.repo_name,
 
                        'text': obj.repo_name,
 
                        'type': 'repo',
 
                        'obj': obj.get_dict()}
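repo_switcher_data above builds its JSON payload inside a small inner function so the result can be cached, and the payload is simply two labelled groups, one for repository groups and one for repositories. A sketch of the same two-group shape built from plain name lists (switcher_payload and the sample names are illustrative, and the real controller attaches obj.get_dict() instead of an empty dict):

    def switcher_payload(group_names, repo_names):
        return [
            {'text': 'Groups',
             'children': [{'id': n, 'text': n, 'type': 'group', 'obj': {}}
                          for n in group_names]},
            {'text': 'Repositories',
             'children': [{'id': n, 'text': n, 'type': 'repo', 'obj': {}}
                          for n in repo_names]},
        ]

    print(switcher_payload(['tools'], ['tools/kallithea']))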
kallithea/controllers/journal.py
 
@@ -48,147 +48,147 @@ from kallithea.model.repo import RepoMod
 
import kallithea.lib.helpers as h
 
from kallithea.lib.auth import LoginRequired, NotAnonymous
 
from kallithea.lib.base import BaseController, render
 
from kallithea.lib.page import Page
 
from kallithea.lib.utils2 import safe_int, AttributeDict
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
language = 'en-us'
 
ttl = "5"
 
feed_nr = 20
 

	
 

	
 
class JournalController(BaseController):
 

	
 
    def _before(self, *args, **kwargs):
 
        super(JournalController, self)._before(*args, **kwargs)
 
        c.search_term = request.GET.get('filter')
 

	
 
    def _get_daily_aggregate(self, journal):
 
        groups = []
 
        for k, g in groupby(journal, lambda x: x.action_as_day):
 
            user_group = []
 
            #groupby username if it's a present value, else fallback to journal username
 
            # groupby username if it's a present value, else fallback to journal username
 
            for _unused, g2 in groupby(list(g), lambda x: x.user.username if x.user else x.username):
 
                l = list(g2)
 
                user_group.append((l[0].user, l))
 

	
 
            groups.append((k, user_group,))
 

	
 
        return groups
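_get_daily_aggregate above leans on itertools.groupby twice: first by day, then by username within each day. groupby only merges adjacent equal keys, so the journal must already be ordered by date (the query building it orders by action_date). A self-contained sketch of the same two-level grouping over plain tuples:

    from itertools import groupby

    entries = [  # (day, username), already sorted by day as groupby requires
        ('2017-08-24', 'alice'), ('2017-08-24', 'alice'),
        ('2017-08-24', 'bob'), ('2017-08-25', 'alice'),
    ]

    daily = []
    for day, day_entries in groupby(entries, key=lambda e: e[0]):
        per_user = [(user, list(items))
                    for user, items in groupby(day_entries, key=lambda e: e[1])]
        daily.append((day, per_user))

    print(daily)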
 

	
 
    def _get_journal_data(self, following_repos):
 
        repo_ids = [x.follows_repository_id for x in following_repos
 
                    if x.follows_repository_id is not None]
 
        user_ids = [x.follows_user_id for x in following_repos
 
                    if x.follows_user_id is not None]
 

	
 
        filtering_criterion = None
 

	
 
        if repo_ids and user_ids:
 
            filtering_criterion = or_(UserLog.repository_id.in_(repo_ids),
 
                        UserLog.user_id.in_(user_ids))
 
        if repo_ids and not user_ids:
 
            filtering_criterion = UserLog.repository_id.in_(repo_ids)
 
        if not repo_ids and user_ids:
 
            filtering_criterion = UserLog.user_id.in_(user_ids)
 
        if filtering_criterion is not None:
 
            journal = UserLog.query() \
 
                .options(joinedload(UserLog.user)) \
 
                .options(joinedload(UserLog.repository))
 
            #filter
 
            # filter
 
            journal = _journal_filter(journal, c.search_term)
 
            journal = journal.filter(filtering_criterion) \
 
                        .order_by(UserLog.action_date.desc())
 
        else:
 
            journal = []
 

	
 
        return journal
 

	
 
    def _atom_feed(self, repos, public=True):
 
        journal = self._get_journal_data(repos)
 
        if public:
 
            _link = h.canonical_url('public_journal_atom')
 
            _desc = '%s %s %s' % (c.site_name, _('Public Journal'),
 
                                  'atom feed')
 
        else:
 
            _link = h.canonical_url('journal_atom')
 
            _desc = '%s %s %s' % (c.site_name, _('Journal'), 'atom feed')
 

	
 
        feed = Atom1Feed(title=_desc,
 
                         link=_link,
 
                         description=_desc,
 
                         language=language,
 
                         ttl=ttl)
 

	
 
        for entry in journal[:feed_nr]:
 
            user = entry.user
 
            if user is None:
 
                #fix deleted users
 
                # fix deleted users
 
                user = AttributeDict({'short_contact': entry.username,
 
                                      'email': '',
 
                                      'full_contact': ''})
 
            action, action_extra, ico = h.action_parser(entry, feed=True)
 
            title = "%s - %s %s" % (user.short_contact, action(),
 
                                    entry.repository.repo_name)
 
            desc = action_extra()
 
            _url = None
 
            if entry.repository is not None:
 
                _url = h.canonical_url('changelog_home',
 
                           repo_name=entry.repository.repo_name)
 

	
 
            feed.add_item(title=title,
 
                          pubdate=entry.action_date,
 
                          link=_url or h.canonical_url(''),
 
                          author_email=user.email,
 
                          author_name=user.full_contact,
 
                          description=desc)
 

	
 
        response.content_type = feed.mime_type
 
        return feed.writeString('utf-8')
 

	
 
    def _rss_feed(self, repos, public=True):
 
        journal = self._get_journal_data(repos)
 
        if public:
 
            _link = h.canonical_url('public_journal_atom')
 
            _desc = '%s %s %s' % (c.site_name, _('Public Journal'),
 
                                  'rss feed')
 
        else:
 
            _link = h.canonical_url('journal_atom')
 
            _desc = '%s %s %s' % (c.site_name, _('Journal'), 'rss feed')
 

	
 
        feed = Rss201rev2Feed(title=_desc,
 
                         link=_link,
 
                         description=_desc,
 
                         language=language,
 
                         ttl=ttl)
 

	
 
        for entry in journal[:feed_nr]:
 
            user = entry.user
 
            if user is None:
 
                #fix deleted users
 
                # fix deleted users
 
                user = AttributeDict({'short_contact': entry.username,
 
                                      'email': '',
 
                                      'full_contact': ''})
 
            action, action_extra, ico = h.action_parser(entry, feed=True)
 
            title = "%s - %s %s" % (user.short_contact, action(),
 
                                    entry.repository.repo_name)
 
            desc = action_extra()
 
            _url = None
 
            if entry.repository is not None:
 
                _url = h.canonical_url('changelog_home',
 
                           repo_name=entry.repository.repo_name)
 

	
 
            feed.add_item(title=title,
 
                          pubdate=entry.action_date,
 
                          link=_url or h.canonical_url(''),
 
                          author_email=user.email,
 
                          author_name=user.full_contact,
 
                          description=desc)
 

	
 
        response.content_type = feed.mime_type
 
        return feed.writeString('utf-8')
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
@@ -196,49 +196,49 @@ class JournalController(BaseController):
 
        # Return a rendered template
 
        p = safe_int(request.GET.get('page'), 1)
 
        c.user = User.get(request.authuser.user_id)
 
        c.following = UserFollowing.query() \
 
            .filter(UserFollowing.user_id == request.authuser.user_id) \
 
            .options(joinedload(UserFollowing.follows_repository)) \
 
            .all()
 

	
 
        journal = self._get_journal_data(c.following)
 

	
 
        def url_generator(**kw):
 
            return url.current(filter=c.search_term, **kw)
 

	
 
        c.journal_pager = Page(journal, page=p, items_per_page=20, url=url_generator)
 
        c.journal_day_aggregate = self._get_daily_aggregate(c.journal_pager)
 

	
 
        if request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            return render('journal/journal_data.html')
 

	
 
        repos_list = Repository.query(sorted=True) \
 
            .filter_by(owner_id=request.authuser.user_id).all()
 

	
 
        repos_data = RepoModel().get_repos_as_dict(repos_list=repos_list,
 
                                                   admin=True)
 
        #data used to render the grid
 
        # data used to render the grid
 
        c.data = repos_data
 

	
 
        return render('journal/journal.html')
 

	
 
    @LoginRequired(api_access=True)
 
    @NotAnonymous()
 
    def journal_atom(self):
 
        """
 
        Produce an atom-1.0 feed via feedgenerator module
 
        """
 
        following = UserFollowing.query() \
 
            .filter(UserFollowing.user_id == request.authuser.user_id) \
 
            .options(joinedload(UserFollowing.follows_repository)) \
 
            .all()
 
        return self._atom_feed(following, public=False)
 

	
 
    @LoginRequired(api_access=True)
 
    @NotAnonymous()
 
    def journal_rss(self):
 
        """
 
        Produce an rss feed via feedgenerator module
 
        """
 
        following = UserFollowing.query() \
 
            .filter(UserFollowing.user_id == request.authuser.user_id) \
kallithea/controllers/pullrequests.py
 
@@ -370,49 +370,49 @@ class PullrequestsController(BaseRepoCon
 
            raise HTTPNotFound
 

	
 
        try:
 
            pull_request = cmd.execute()
 
            Session().commit()
 
        except Exception:
 
            h.flash(_('Error occurred while creating pull request'),
 
                    category='error')
 
            log.error(traceback.format_exc())
 
            raise HTTPFound(location=old_pull_request.url())
 

	
 
        h.flash(_('New pull request iteration created'),
 
                category='success')
 
        raise HTTPFound(location=pull_request.url())
 

	
 
    # pullrequest_post for PR editing
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionLevelDecorator('read')
 
    def post(self, repo_name, pull_request_id):
 
        pull_request = PullRequest.get_or_404(pull_request_id)
 
        if pull_request.is_closed():
 
            raise HTTPForbidden()
 
        assert pull_request.other_repo.repo_name == repo_name
 
        #only owner or admin can update it
 
        # only owner or admin can update it
 
        owner = pull_request.owner_id == request.authuser.user_id
 
        repo_admin = h.HasRepoPermissionLevel('admin')(c.repo_name)
 
        if not (h.HasPermissionAny('hg.admin')() or repo_admin or owner):
 
            raise HTTPForbidden()
 

	
 
        _form = PullRequestPostForm()().to_python(request.POST)
 

	
 
        cur_reviewers = set(pull_request.get_reviewer_users())
 
        new_reviewers = set(_get_reviewer(s) for s in _form['review_members'])
 
        old_reviewers = set(_get_reviewer(s) for s in _form['org_review_members'])
 

	
 
        other_added = cur_reviewers - old_reviewers
 
        other_removed = old_reviewers - cur_reviewers
 

	
 
        if other_added:
 
            h.flash(_('Meanwhile, the following reviewers have been added: %s') %
 
                    (', '.join(u.username for u in other_added)),
 
                    category='warning')
 
        if other_removed:
 
            h.flash(_('Meanwhile, the following reviewers have been removed: %s') %
 
                    (', '.join(u.username for u in other_removed)),
 
                    category='warning')
 

	
 
        if _form['updaterev']:
 
@@ -425,49 +425,49 @@ class PullrequestsController(BaseRepoCon
 
        added_reviewers = new_reviewers - old_reviewers - cur_reviewers
 
        removed_reviewers = (old_reviewers - new_reviewers) & cur_reviewers
 

	
 
        old_description = pull_request.description
 
        pull_request.title = _form['pullrequest_title']
 
        pull_request.description = _form['pullrequest_desc'].strip() or _('No description')
 
        pull_request.owner = User.get_by_username(_form['owner'])
 
        user = User.get(request.authuser.user_id)
 

	
 
        PullRequestModel().mention_from_description(user, pull_request, old_description)
 
        PullRequestModel().add_reviewers(user, pull_request, added_reviewers)
 
        PullRequestModel().remove_reviewers(user, pull_request, removed_reviewers)
 

	
 
        Session().commit()
 
        h.flash(_('Pull request updated'), category='success')
 

	
 
        raise HTTPFound(location=pull_request.url())
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionLevelDecorator('read')
 
    @jsonify
 
    def delete(self, repo_name, pull_request_id):
 
        pull_request = PullRequest.get_or_404(pull_request_id)
 
        #only owner can delete it !
 
        # only owner can delete it !
 
        if pull_request.owner_id == request.authuser.user_id:
 
            PullRequestModel().delete(pull_request)
 
            Session().commit()
 
            h.flash(_('Successfully deleted pull request'),
 
                    category='success')
 
            raise HTTPFound(location=url('my_pullrequests'))
 
        raise HTTPForbidden()
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionLevelDecorator('read')
 
    def show(self, repo_name, pull_request_id, extra=None):
 
        repo_model = RepoModel()
 
        c.users_array = repo_model.get_users_js()
 
        c.user_groups_array = repo_model.get_user_groups_js()
 
        c.pull_request = PullRequest.get_or_404(pull_request_id)
 
        c.allowed_to_change_status = self._get_is_allowed_change_status(c.pull_request)
 
        cc_model = ChangesetCommentsModel()
 
        cs_model = ChangesetStatusModel()
 

	
 
        # pull_requests repo_name we opened it against
 
        # ie. other_repo must match
 
        if repo_name != c.pull_request.other_repo.repo_name:
 
            raise HTTPNotFound
 

	
 
@@ -575,49 +575,49 @@ class PullrequestsController(BaseRepoCon
 

	
 
        c.avail_revs = avail_revs
 
        c.avail_cs = [org_scm_instance.get_changeset(r) for r in avail_show]
 
        c.avail_jsdata = graph_data(org_scm_instance, avail_show)
 

	
 
        raw_ids = [x.raw_id for x in c.cs_ranges]
 
        c.cs_comments = c.cs_repo.get_comments(raw_ids)
 
        c.cs_statuses = c.cs_repo.statuses(raw_ids)
 

	
 
        ignore_whitespace = request.GET.get('ignorews') == '1'
 
        line_context = safe_int(request.GET.get('context'), 3)
 
        c.ignorews_url = _ignorews_url
 
        c.context_url = _context_url
 
        c.fulldiff = request.GET.get('fulldiff')
 
        diff_limit = self.cut_off_limit if not c.fulldiff else None
 

	
 
        # we swap org/other ref since we run a simple diff on one repo
 
        log.debug('running diff between %s and %s in %s',
 
                  c.a_rev, c.cs_rev, org_scm_instance.path)
 
        try:
 
            txtdiff = org_scm_instance.get_diff(rev1=safe_str(c.a_rev), rev2=safe_str(c.cs_rev),
 
                                                ignore_whitespace=ignore_whitespace,
 
                                                context=line_context)
 
        except ChangesetDoesNotExistError:
 
            txtdiff =  _("The diff can't be shown - the PR revisions could not be found.")
 
            txtdiff = _("The diff can't be shown - the PR revisions could not be found.")
 
        diff_processor = diffs.DiffProcessor(txtdiff or '', format='gitdiff',
 
                                             diff_limit=diff_limit)
 
        _parsed = diff_processor.prepare()
 

	
 
        c.limited_diff = False
 
        if isinstance(_parsed, LimitedDiffContainer):
 
            c.limited_diff = True
 

	
 
        c.file_diff_data = []
 
        c.lines_added = 0
 
        c.lines_deleted = 0
 

	
 
        for f in _parsed:
 
            st = f['stats']
 
            c.lines_added += st['added']
 
            c.lines_deleted += st['deleted']
 
            filename = f['filename']
 
            fid = h.FID('', filename)
 
            diff = diff_processor.as_html(enable_comments=True,
 
                                          parsed_lines=[f])
 
            c.file_diff_data.append((fid, None, f['operation'], f['old_filename'], filename, diff, st))
 

	
 
        # inline comments
 
        c.inline_cnt = 0
 
@@ -713,35 +713,35 @@ class PullrequestsController(BaseRepoCon
 

	
 
        Session().commit()
 

	
 
        if not request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            raise HTTPFound(location=pull_request.url())
 

	
 
        data = {
 
           'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
 
        }
 
        if comment is not None:
 
            c.comment = comment
 
            data.update(comment.get_dict())
 
            data.update({'rendered_text':
 
                         render('changeset/changeset_comment_block.html')})
 

	
 
        return data
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionLevelDecorator('read')
 
    @jsonify
 
    def delete_comment(self, repo_name, comment_id):
 
        co = ChangesetComment.get(comment_id)
 
        if co.pull_request.is_closed():
 
            #don't allow deleting comments on closed pull request
 
            # don't allow deleting comments on closed pull request
 
            raise HTTPForbidden()
 

	
 
        owner = co.author_id == request.authuser.user_id
 
        repo_admin = h.HasRepoPermissionLevel('admin')(c.repo_name)
 
        if h.HasPermissionAny('hg.admin')() or repo_admin or owner:
 
            ChangesetCommentsModel().delete(comment=co)
 
            Session().commit()
 
            return True
 
        else:
 
            raise HTTPForbidden()
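The reviewer bookkeeping in post() above is plain set arithmetic: reviewers added behind the submitter's back are cur - old, reviewers removed meanwhile are old - cur, and the submitter's own additions and removals are computed against both sets. The same logic with bare username strings standing in for user objects:

    old = {'alice', 'bob'}    # reviewers when the edit form was loaded
    cur = {'alice', 'carol'}  # reviewers currently on the pull request
    new = {'alice', 'dave'}   # reviewers submitted with the form

    other_added = cur - old                # {'carol'}: added meanwhile
    other_removed = old - cur              # {'bob'}: removed meanwhile
    added_reviewers = new - old - cur      # {'dave'}: to add now
    removed_reviewers = (old - new) & cur  # set(): bob is already gone

    print((other_added, other_removed, added_reviewers, removed_reviewers))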
kallithea/controllers/root.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
from tgext.routes import RoutedController
 
from kallithea.config.routing import make_map
 
from kallithea.lib.base import BaseController
 
from kallithea.controllers.error import ErrorController
 
from tg import config
 

	
 

	
 
# This is the main Kallithea entry point; TurboGears will forward all requests
 
# to an instance of 'controller.root.RootController' in the configured
 
# 'application' module (set by app_cfg.py).  Requests are forwarded to
 
# controllers based on the routing mapper that lives in this root instance.
 
# The mapper is configured using routes defined in routing.py.  This use of the
 
# 'mapper' attribute is a feature of tgext.routes, which is activated by
 
# inheriting from its RoutedController class.
 
class RootController(RoutedController, BaseController):
 

	
 
    mapper = make_map(config)
 

	
 
    # the following assignment hooks in error handling
 
    error = ErrorController()
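The comment block above explains that tgext.routes dispatches requests through the mapper attribute that make_map() returns. routing.py itself is not part of this changeset, so purely as an illustration of the kind of Routes mapper such a function produces (the route names and URLs below are made up, not Kallithea's actual routes):

    from routes import Mapper

    def make_map_sketch():
        # hypothetical, minimal stand-in for kallithea.config.routing.make_map
        rmap = Mapper()
        rmap.connect('about', '/about', controller='home', action='about')
        rmap.connect('repo_summary', '/{repo_name}', controller='summary',
                     action='index')
        return rmap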
kallithea/lib/annotate.py
 
@@ -128,58 +128,58 @@ class AnnotateHtmlFormatter(HtmlFormatte
 
            for i in range(fl, fl + lncount):
 
                if i % st == 0:
 
                    if i % sp == 0:
 
                        if aln:
 
                            lines.append('<a href="#%s-%d" class="special">'
 
                                         '%*d</a>' %
 
                                         (la, i, mw, i))
 
                        else:
 
                            lines.append('<span class="special">'
 
                                         '%*d</span>' % (mw, i))
 
                    else:
 
                        if aln:
 
                            lines.append('<a href="#%s-%d">'
 
                                         '%*d</a>' % (la, i, mw, i))
 
                        else:
 
                            lines.append('%*d' % (mw, i))
 
                else:
 
                    lines.append('')
 
            ls = '\n'.join(lines)
 
        else:
 
            lines = []
 
            for i in range(fl, fl + lncount):
 
                if i % st == 0:
 
                    if aln:
 
                        lines.append('<a href="#%s-%d">%*d</a>' \
 
                        lines.append('<a href="#%s-%d">%*d</a>'
 
                                     % (la, i, mw, i))
 
                    else:
 
                        lines.append('%*d' % (mw, i))
 
                else:
 
                    lines.append('')
 
            ls = '\n'.join(lines)
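Both line-number branches above format with the '%*d' form, where the * consumes the column width (mw) from the argument tuple, so every number is right-aligned to the same width:

    mw = 4  # minimum width, as the formatter computes it
    for i in (7, 42, 1337):
        print('%*d' % (mw, i))
    # prints '   7', '  42', '1337' - all padded to four columns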
 

	
 
#        annotate_changesets = [tup[1] for tup in self.filenode.annotate]
 
##        TODO: not sure what that fixes
 
#        # TODO: not sure what that fixes
 
#        # If pygments cropped last lines break we need do that too
 
#        ln_cs = len(annotate_changesets)
 
#        ln_ = len(ls.splitlines())
 
#        if  ln_cs > ln_:
 
#            annotate_changesets = annotate_changesets[:ln_ - ln_cs]
 
        annotate = ''.join((self.annotate_from_changeset(el[2]())
 
                            for el in self.filenode.annotate))
 
        # in case you wonder about the seemingly redundant <div> here:
 
        # since the content in the other cell also is wrapped in a div,
 
        # some browsers in some configurations seem to mess up the formatting.
 
        '''
 
        yield 0, ('<table class="%stable">' % self.cssclass +
 
                  '<tr><td class="linenos"><div class="linenodiv"><pre>' +
 
                  ls + '</pre></div></td>' +
 
                  '<td class="code">')
 
        yield 0, dummyoutfile.getvalue()
 
        yield 0, '</td></tr></table>'
 

	
 
        '''
 
        headers_row = []
 
        if self.headers:
 
            headers_row = ['<tr class="annotate-header">']
 
            for key in self.order:
 
                td = ''.join(('<td>', self.headers[key], '</td>'))
kallithea/lib/auth.py
 
@@ -83,74 +83,74 @@ class PasswordGenerator(object):
 

	
 
    def gen_password(self, length, alphabet=ALPHABETS_FULL):
 
        assert len(alphabet) <= 256, alphabet
 
        l = []
 
        while len(l) < length:
 
            i = ord(os.urandom(1))
 
            if i < len(alphabet):
 
                l.append(alphabet[i])
 
        return ''.join(l)
 

	
 

	
 
def get_crypt_password(password):
 
    """
 
    Cryptographic function used for password hashing based on pybcrypt
 
    or Python's own OpenSSL wrapper on windows
 

	
 
    :param password: password to hash
 
    """
 
    if is_windows:
 
        return hashlib.sha256(password).hexdigest()
 
    elif is_unix:
 
        import bcrypt
 
        return bcrypt.hashpw(safe_str(password), bcrypt.gensalt(10))
 
    else:
 
        raise Exception('Unknown or unsupported platform %s' \
 
        raise Exception('Unknown or unsupported platform %s'
 
                        % __platform__)
 

	
 

	
 
def check_password(password, hashed):
 
    """
 
    Checks a password against its hashed value, running a different
 
    implementation depending on the platform it runs on
 

	
 
    :param password: password
 
    :param hashed: password in hashed form
 
    """
 

	
 
    if is_windows:
 
        return hashlib.sha256(password).hexdigest() == hashed
 
    elif is_unix:
 
        import bcrypt
 
        print (safe_str(password), safe_str(hashed))
 
        print(safe_str(password), safe_str(hashed))
 
        try:
 
            return bcrypt.checkpw(safe_str(password), safe_str(hashed))
 
        except ValueError as e:
 
            # bcrypt will throw ValueError 'Invalid hashed_password salt' on all password errors
 
            log.error('error from bcrypt checking password: %s', e)
 
            return False
 
    else:
 
        raise Exception('Unknown or unsupported platform %s' \
 
        raise Exception('Unknown or unsupported platform %s'
 
                        % __platform__)
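On Unix, get_crypt_password and check_password above delegate to the bcrypt package, so the round trip reduces to a hashpw/checkpw pair. A short sketch of those two calls in isolation, assuming bcrypt is installed and the password is already a byte string:

    import bcrypt

    password = b'secret'
    hashed = bcrypt.hashpw(password, bcrypt.gensalt(10))  # store this value

    # later, at login time: checkpw re-hashes using the salt embedded in hashed
    if bcrypt.checkpw(password, hashed):
        print('password ok')
    else:
        print('password mismatch')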
 

	
 

	
 
def _cached_perms_data(user_id, user_is_admin, user_inherit_default_permissions,
 
                       explicit, algo):
 
    RK = 'repositories'
 
    GK = 'repositories_groups'
 
    UK = 'user_groups'
 
    GLOBAL = 'global'
 
    PERM_WEIGHTS = Permission.PERM_WEIGHTS
 
    permissions = {RK: {}, GK: {}, UK: {}, GLOBAL: set()}
 

	
 
    def _choose_perm(new_perm, cur_perm):
 
        new_perm_val = PERM_WEIGHTS[new_perm]
 
        cur_perm_val = PERM_WEIGHTS[cur_perm]
 
        if algo == 'higherwin':
 
            if new_perm_val > cur_perm_val:
 
                return new_perm
 
            return cur_perm
 
        elif algo == 'lowerwin':
 
            if new_perm_val < cur_perm_val:
 
                return new_perm
 
            return cur_perm
 

	
 
@@ -240,49 +240,49 @@ def _cached_perms_data(user_id, user_is_
 
        'hg.create.none', 'hg.create.repository',
 
        'hg.usergroup.create.false', 'hg.usergroup.create.true'
 
    ])
 

	
 
    # USER GROUPS comes first
 
    # user group global permissions
 
    user_perms_from_users_groups = Session().query(UserGroupToPerm) \
 
        .options(joinedload(UserGroupToPerm.permission)) \
 
        .join((UserGroupMember, UserGroupToPerm.users_group_id ==
 
               UserGroupMember.users_group_id)) \
 
        .filter(UserGroupMember.user_id == user_id) \
 
        .join((UserGroup, UserGroupMember.users_group_id ==
 
               UserGroup.users_group_id)) \
 
        .filter(UserGroup.users_group_active == True) \
 
        .order_by(UserGroupToPerm.users_group_id) \
 
        .all()
 
    # need to group here by groups since user can be in more than
 
    # one group
 
    _grouped = [[x, list(y)] for x, y in
 
                itertools.groupby(user_perms_from_users_groups,
 
                                  lambda x:x.users_group)]
 
    for gr, perms in _grouped:
 
        # since user can be in multiple groups iterate over them and
 
        # select the lowest permissions first (more explicit)
 
        ##TODO: do this^^
 
        # TODO: do this^^
 
        if not gr.inherit_default_permissions:
 
            # NEED TO IGNORE all configurable permissions and
 
            # replace them with explicitly set
 
            permissions[GLOBAL] = permissions[GLOBAL] \
 
                                            .difference(_configurable)
 
        for perm in perms:
 
            permissions[GLOBAL].add(perm.permission.permission_name)
 

	
 
    # user specific global permissions
 
    user_perms = Session().query(UserToPerm) \
 
            .options(joinedload(UserToPerm.permission)) \
 
            .filter(UserToPerm.user_id == user_id).all()
 

	
 
    if not user_inherit_default_permissions:
 
        # NEED TO IGNORE all configurable permissions and
 
        # replace them with explicitly set
 
        permissions[GLOBAL] = permissions[GLOBAL] \
 
                                        .difference(_configurable)
 

	
 
        for perm in user_perms:
 
            permissions[GLOBAL].add(perm.permission.permission_name)
 
    ## END GLOBAL PERMISSIONS
 

	
 
    #======================================================================
 
@@ -388,77 +388,77 @@ def _cached_perms_data(user_id, user_is_
 
    user_group_user_groups_perms = \
 
     Session().query(UserGroupUserGroupToPerm, Permission, UserGroup) \
 
     .join((UserGroup, UserGroupUserGroupToPerm.target_user_group_id
 
            == UserGroup.users_group_id)) \
 
     .join((Permission, UserGroupUserGroupToPerm.permission_id
 
            == Permission.permission_id)) \
 
     .join((UserGroupMember, UserGroupUserGroupToPerm.user_group_id
 
            == UserGroupMember.users_group_id)) \
 
     .filter(UserGroupMember.user_id == user_id) \
 
     .join((UserGroup, UserGroupMember.users_group_id ==
 
            UserGroup.users_group_id), aliased=True, from_joinpoint=True) \
 
     .filter(UserGroup.users_group_active == True) \
 
     .all()
 

	
 
    multiple_counter = collections.defaultdict(int)
 
    for perm in user_group_user_groups_perms:
 
        g_k = perm.UserGroupUserGroupToPerm.target_user_group.users_group_name
 
        multiple_counter[g_k] += 1
 
        p = perm.Permission.permission_name
 
        cur_perm = permissions[UK][g_k]
 
        if multiple_counter[g_k] > 1:
 
            p = _choose_perm(p, cur_perm)
 
        permissions[UK][g_k] = p
 

	
 
    #user explicit permission for user groups
 
    # user explicit permission for user groups
 
    user_user_groups_perms = Permission.get_default_user_group_perms(user_id)
 
    for perm in user_user_groups_perms:
 
        u_k = perm.UserUserGroupToPerm.user_group.users_group_name
 
        p = perm.Permission.permission_name
 
        cur_perm = permissions[UK][u_k]
 
        if not explicit:
 
            p = _choose_perm(p, cur_perm)
 
        permissions[UK][u_k] = p
 

	
 
    return permissions
 

	
 

	
 
def allowed_api_access(controller_name, whitelist=None, api_key=None):
 
    """
 
    Check whether the given controller_name is in the API access whitelist
 
    """
 
    if not whitelist:
 
        from kallithea import CONFIG
 
        whitelist = aslist(CONFIG.get('api_access_controllers_whitelist'),
 
                           sep=',')
 
        log.debug('whitelist of API access is: %s', whitelist)
 
    api_access_valid = controller_name in whitelist
 
    if api_access_valid:
 
        log.debug('controller:%s is in API whitelist', controller_name)
 
    else:
 
        msg = 'controller: %s is *NOT* in API whitelist' % (controller_name)
 
        if api_key:
 
            #if we use API key and don't have access it's a warning
 
            # if we use API key and don't have access it's a warning
 
            log.warning(msg)
 
        else:
 
            log.debug(msg)
 
    return api_access_valid
 

	
 

	
 
class AuthUser(object):
 
    """
 
    Represents a Kallithea user, including various authentication and
 
    authorization information. Typically used to store the current user,
 
    but is also used as a generic user information data structure in
 
    parts of the code, e.g. user management.
 

	
 
    Constructed from a database `User` object, a user ID or cookie dict,
 
    it looks up the user (if needed) and copies all attributes to itself,
 
    adding various non-persistent data. If lookup fails but anonymous
 
    access to Kallithea is enabled, the default user is loaded instead.
 

	
 
    `AuthUser` does not by itself authenticate users and the constructor
 
    sets the `is_authenticated` field to False. It's up to other parts
 
    of the code to check e.g. if a supplied password is correct, and if
 
    so, set `is_authenticated` to True.
 

	
 
    However, `AuthUser` does refuse to load a user that is not `active`.
 
@@ -489,49 +489,49 @@ class AuthUser(object):
 
        # These attributes will be overridden by fill_data, below, unless the
 
        # requested user cannot be found and the default anonymous user is
 
        # not enabled.
 
        self.user_id = None
 
        self.username = None
 
        self.api_key = None
 
        self.name = ''
 
        self.lastname = ''
 
        self.email = ''
 
        self.admin = False
 
        self.inherit_default_permissions = False
 

	
 
        # Look up database user, if necessary.
 
        if user_id is not None:
 
            log.debug('Auth User lookup by USER ID %s', user_id)
 
            dbuser = user_model.get(user_id)
 
        else:
 
            # Note: dbuser is allowed to be None.
 
            log.debug('Auth User lookup by database user %s', dbuser)
 

	
 
        is_user_loaded = self._fill_data(dbuser)
 

	
 
        # If user cannot be found, try falling back to anonymous.
 
        if not is_user_loaded:
 
            is_user_loaded =  self._fill_data(self._default_user)
 
            is_user_loaded = self._fill_data(self._default_user)
 

	
 
        self.is_default_user = (self.user_id == self._default_user.user_id)
 
        self.is_anonymous = not is_user_loaded or self.is_default_user
 

	
 
        if not self.username:
 
            self.username = 'None'
 

	
 
        log.debug('Auth User is now %s', self)
 

	
 
    def _fill_data(self, dbuser):
 
        """
 
        Copies database fields from a `db.User` to this `AuthUser`. Does
 
        not copy `api_keys` and `permissions` attributes.
 

	
 
        Checks that `dbuser` is `active` (and not None) before copying;
 
        returns True on success.
 
        """
 
        if dbuser is not None and dbuser.active:
 
            log.debug('filling %s data', dbuser)
 
            for k, v in dbuser.get_dict().iteritems():
 
                assert k not in ['api_keys', 'permissions']
 
                setattr(self, k, v)
 
            return True
 
        return False
kallithea/lib/auth_modules/__init__.py
 
@@ -19,85 +19,84 @@ import logging
 
import traceback
 
import importlib
 

	
 
from kallithea.lib.utils2 import str2bool
 
from kallithea.lib.compat import formatted_json, hybrid_property
 
from kallithea.lib.auth import PasswordGenerator
 
from kallithea.model.user import UserModel
 
from kallithea.model.db import Setting, User
 
from kallithea.model.meta import Session
 
from kallithea.model.user_group import UserGroupModel
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class LazyFormencode(object):
 
    def __init__(self, formencode_obj, *args, **kwargs):
 
        self.formencode_obj = formencode_obj
 
        self.args = args
 
        self.kwargs = kwargs
 

	
 
    def __call__(self, *args, **kwargs):
 
        from inspect import isfunction
 
        formencode_obj = self.formencode_obj
 
        if isfunction(formencode_obj):
 
            #case we wrap validators into functions
 
            # case we wrap validators into functions
 
            formencode_obj = self.formencode_obj(*args, **kwargs)
 
        return formencode_obj(*self.args, **self.kwargs)
 

	
 

	
 
class KallitheaAuthPluginBase(object):
 
    auth_func_attrs = {
 
        "username": "unique username",
 
        "firstname": "first name",
 
        "lastname": "last name",
 
        "email": "email address",
 
        "groups": '["list", "of", "groups"]',
 
        "extern_name": "name in external source of record",
 
        "admin": 'True|False defines if user should be Kallithea admin',
 
        "active": 'True|False defines active state of user in Kallithea',
 
        "active_from_extern": "True|False|None, active state from the external auth, "
 
                              "None means use value from the auth plugin"
 
    }
 

	
 
    @property
 
    def validators(self):
 
        """
 
        Exposes Kallithea validators modules
 
        """
 
        # this is a hack to overcome issues with pylons threadlocals and
 
        # translator object _() not being registered properly.
 
        class LazyCaller(object):
 
            def __init__(self, name):
 
                self.validator_name = name
 

	
 
            def __call__(self, *args, **kwargs):
 
                from kallithea.model import validators as v
 
                obj = getattr(v, self.validator_name)
 
                #log.debug('Initializing lazy formencode object: %s', obj)
 
                return LazyFormencode(obj, *args, **kwargs)
 

	
 

	
 
        class ProxyGet(object):
 
            def __getattribute__(self, name):
 
                return LazyCaller(name)
 

	
 
        return ProxyGet()
 

	
 
    @hybrid_property
 
    def name(self):
 
        """
 
        Returns the name of this authentication plugin.
 

	
 
        :returns: string
 
        """
 
        raise NotImplementedError("Not implemented in base class")
 

	
 
    @hybrid_property
 
    def is_container_auth(self):
 
        """
 
        Returns bool if this module uses container auth.
 

	
 
        This property will trigger an automatic call to authenticate on
 
        a visit to the website or during a push/pull.
 

	
 
        :returns: bool
 
@@ -399,36 +398,37 @@ def authenticate(username, password, env
 
                username = user.username
 

	
 
        log.info('Authenticating user using %s plugin', module)
 

	
 
        # _authenticate is a wrapper for .auth() method of plugin.
 
        # it checks if .auth() sends proper data. For KallitheaExternalAuthPlugin
 
        # it also maps users to Database and maps the attributes returned
 
        # from .auth() to Kallithea database. If this function returns data
 
        # then auth is correct.
 
        user_data = plugin._authenticate(user, username, password,
 
                                           plugin_settings,
 
                                           environ=environ or {})
 
        log.debug('Plugin user data: %s', user_data)
 

	
 
        if user_data is not None:
 
            log.debug('Plugin returned proper authentication data')
 
            return user_data
 

	
 
        # we failed to Auth because .auth() method didn't return the user
 
        if username:
 
            log.warning("User `%s` failed to authenticate against %s",
 
                        username, module)
 
    return None
 

	
 

	
 
def get_managed_fields(user):
 
    """return list of fields that are managed by the user's auth source, usually some of
 
    'username', 'firstname', 'lastname', 'email', 'active', 'password'
 
    """
 
    auth_plugins = get_auth_plugins()
 
    for plugin in auth_plugins:
 
        module = plugin.__class__.__module__
 
        log.debug('testing %s (%s) with auth plugin %s', user, user.extern_type, module)
 
        if plugin.name == user.extern_type:
 
            return plugin.get_managed_fields()
 
    log.error('no auth plugin %s found for %s', user.extern_type, user)
 
    return [] # TODO: Fail badly instead of allowing everything to be edited?
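Both authenticate() and get_managed_fields() above walk the configured auth plugins and stop at the first one that yields an answer. Reduced to its control flow, that is a first-match loop like this (first_match and try_one are illustrative names, not Kallithea API):

    def first_match(plugins, try_one):
        """Return the first non-None result produced by a plugin, else None."""
        for plugin in plugins:
            result = try_one(plugin)
            if result is not None:
                return result
        return None

    # e.g. resolving managed fields roughly the way get_managed_fields does:
    # first_match(get_auth_plugins(),
    #             lambda p: p.get_managed_fields() if p.name == user.extern_type else None)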
kallithea/lib/base.py
 
@@ -269,49 +269,49 @@ class BaseVCSController(object):
 
        log.debug('PRE-AUTH got %s as username', username)
 

	
 
        # If not authenticated by the container, running basic auth
 
        if not username:
 
            self.authenticate.realm = safe_str(self.config['realm'])
 
            result = self.authenticate(environ)
 
            if isinstance(result, str):
 
                paste.httpheaders.AUTH_TYPE.update(environ, 'basic')
 
                paste.httpheaders.REMOTE_USER.update(environ, result)
 
                username = result
 
            else:
 
                return None, result.wsgi_application
 

	
 
        #==============================================================
 
        # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
 
        #==============================================================
 
        try:
 
            user = User.get_by_username_or_email(username)
 
            if user is None or not user.active:
 
                return None, webob.exc.HTTPForbidden()
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            return None, webob.exc.HTTPInternalServerError()
 

	
 
        #check permissions for this repository
 
        # check permissions for this repository
 
        perm = self._check_permission(action, user, repo_name, ip_addr)
 
        if not perm:
 
            return None, webob.exc.HTTPForbidden()
 

	
 
        return user, None
 

	
 
    def _handle_request(self, environ, start_response):
 
        raise NotImplementedError()
 

	
 
    def _get_by_id(self, repo_name):
 
        """
 
        Gets a special pattern _<ID> from clone url and tries to replace it
 
        with a repository_name for support of _<ID> permanent URLs
 

	
 
        :param repo_name:
 
        """
 

	
 
        data = repo_name.split('/')
 
        if len(data) >= 2:
 
            from kallithea.lib.utils import get_repo_by_id
 
            by_id_match = get_repo_by_id(repo_name)
 
            if by_id_match:
 
                data[1] = safe_str(by_id_match)
 

	
 
@@ -501,49 +501,49 @@ class BaseController(TGController):
 

	
 
        # Make sure CSRF token never appears in the URL. If so, invalidate it.
 
        if secure_form.token_key in request.GET:
 
            log.error('CSRF key leak detected')
 
            session.pop(secure_form.token_key, None)
 
            session.save()
 
            from kallithea.lib import helpers as h
 
            h.flash(_('CSRF token leak has been detected - all form tokens have been expired'),
 
                    category='error')
 

	
 
        # WebOb already ignores request payload parameters for anything other
 
        # than POST/PUT, but double-check since other Kallithea code relies on
 
        # this assumption.
 
        if request.method not in ['POST', 'PUT'] and request.POST:
 
            log.error('%r request with payload parameters; WebOb should have stopped this', request.method)
 
            raise webob.exc.HTTPBadRequest()
 

	
 
    def __call__(self, environ, context):
 
        try:
 
            request.ip_addr = _get_ip_addr(environ)
 
            # make sure that we update permissions each time we call controller
 

	
 
            self._basic_security_checks()
 

	
 
            #set globals for auth user
 
            # set globals for auth user
 

	
 
            bearer_token = None
 
            try:
 
                # Request.authorization may raise ValueError on invalid input
 
                type, params = request.authorization
 
            except (ValueError, TypeError):
 
                pass
 
            else:
 
                if type.lower() == 'bearer':
 
                    bearer_token = params
 

	
 
            request.authuser = request.user = self._determine_auth_user(
 
                request.GET.get('api_key'),
 
                bearer_token,
 
                session.get('authuser'),
 
            )
 

	
 
            log.info('IP: %s User: %s accessed %s',
 
                request.ip_addr, request.authuser,
 
                safe_unicode(_get_access_path(environ)),
 
            )
 
            return super(BaseController, self).__call__(environ, context)
 
        except webob.exc.HTTPException as e:
 
            return e
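__call__ above extracts a bearer token from request.authorization, which WebOb exposes as an (auth type, params) pair and which can raise on malformed input. The same guard sketched against a raw header string rather than the WebOb property (parse_bearer is an illustrative helper, not Kallithea code):

    def parse_bearer(authorization_header):
        """Return the bearer token from an Authorization header, or None."""
        if not authorization_header:
            return None
        try:
            auth_type, params = authorization_header.split(' ', 1)
        except ValueError:  # no space-separated type/params parts
            return None
        if auth_type.lower() != 'bearer':
            return None
        return params.strip()

    print(parse_bearer('Bearer abc123'))        # 'abc123'
    print(parse_bearer('Basic dXNlcjpwdw=='))   # None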
kallithea/lib/caching_query.py
 
@@ -199,42 +199,42 @@ class FromCache(MapperOption):
 
        some other identifier.
 

	
 
        """
 
        self.region = region
 
        self.namespace = namespace
 
        self.cache_key = cache_key
 

	
 
    def process_query(self, query):
 
        """Process a Query during normal loading operation."""
 

	
 
        _set_cache_parameters(query, self.region, self.namespace,
 
                              self.cache_key)
 

	
 

	
 
def _params_from_query(query):
 
    """Pull the bind parameter values from a query.
 

	
 
    This takes into account any scalar attribute bindparam set up.
 

	
 
    E.g. params_from_query(query.filter(Cls.foo==5).filter(Cls.bar==7)))
 
    would return [5, 7].
 

	
 
    """
 
    v = []
 

	
 
    def visit_bindparam(bind):
 

	
 
        if bind.key in query._params:
 
            value = query._params[bind.key]
 
        elif bind.callable:
 
            # lazyloader may dig a callable in here, intended
 
            # to late-evaluate params after autoflush is called.
 
            # convert to a scalar value.
 
            value = bind.callable()
 
        else:
 
            value = bind.value
 

	
 
        v.append(value)
 
    if query._criterion is not None:
 
        visitors.traverse(query._criterion, {}, {'bindparam':visit_bindparam})
 
        visitors.traverse(query._criterion, {}, {'bindparam': visit_bindparam})
 
    for f in query._from_obj:
 
        visitors.traverse(f, {}, {'bindparam':visit_bindparam})
 
        visitors.traverse(f, {}, {'bindparam': visit_bindparam})
 
    return v
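FromCache above is the MapperOption that callers attach to a query, taking (region, namespace, cache_key). A usage sketch, assuming the session is configured with this module's CachingQuery and that a Beaker region with the name used below actually exists (the region name and lookup are illustrative):

    from kallithea.lib.caching_query import FromCache
    from kallithea.model.db import User
    from kallithea.model.meta import Session

    user = Session().query(User) \
        .options(FromCache('sql_cache_short', 'get_user_by_id', 'user-42')) \
        .filter(User.user_id == 42) \
        .first()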
kallithea/lib/celerylib/__init__.py
 
@@ -53,48 +53,49 @@ class FakeTask(object):
 
        self.result = result
 

	
 
    def failed(self):
 
        return False
 

	
 
    traceback = None # if failed
 

	
 
    task_id = None
 

	
 

	
 
def task(f_org):
 
    """Wrapper of celery.task.task, running async if CELERY_ON
 
    """
 

	
 
    if CELERY_ON:
 
        def f_async(*args, **kwargs):
 
            log.info('executing %s task', f_org.__name__)
 
            try:
 
                f_org(*args, **kwargs)
 
            finally:
 
                log.info('executed %s task', f_org.__name__)
 
        f_async.__name__ = f_org.__name__
 
        from kallithea.lib import celerypylons
 
        runner = celerypylons.task(ignore_result=True)(f_async)
 

	
 
        def f_wrapped(*args, **kwargs):
 
            t = runner.apply_async(args=args, kwargs=kwargs)
 
            log.info('executing task %s in async mode - id %s', f_org, t.task_id)
 
            return t
 
    else:
 
        def f_wrapped(*args, **kwargs):
 
            log.info('executing task %s in sync', f_org.__name__)
 
            try:
 
                result = f_org(*args, **kwargs)
 
            except Exception as e:
 
                log.error('exception executing sync task %s in sync: %r', f_org.__name__, e)
 
                raise # TODO: return this in FakeTask as with async tasks?
 
            return FakeTask(result)
 

	
 
    return f_wrapped
 

	
 

	
 
def __get_lockkey(func, *fargs, **fkwargs):
 
    params = list(fargs)
 
    params.extend(['%s-%s' % ar for ar in fkwargs.items()])
 

	
 
    func_name = str(func.__name__) if hasattr(func, '__name__') else str(func)
 

	
 
    lockkey = 'task_%s.lock' % \
kallithea/lib/celerylib/tasks.py
 
@@ -159,49 +159,49 @@ def get_commits_stats(repo_name, ts_min_
 
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
 

	
 
                        datadict = {"time": k,
 
                                    "commits": 1,
 
                                    "added": len(cs.added),
 
                                    "changed": len(cs.changed),
 
                                    "removed": len(cs.removed),
 
                                   }
 
                        co_day_auth_aggr[akc(cs.author)]['data'] \
 
                            .append(datadict)
 

	
 
            else:
 
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
 
                    co_day_auth_aggr[akc(cs.author)] = {
 
                                        "label": akc(cs.author),
 
                                        "data": [{"time":k,
 
                                                 "commits":1,
 
                                                 "added":len(cs.added),
 
                                                 "changed":len(cs.changed),
 
                                                 "removed":len(cs.removed),
 
                                                 }],
 
                                        "schema": ["commits"],
 
                                        }
 

	
 
            #gather all data by day
 
            # gather all data by day
 
            if k in commits_by_day_aggregate:
 
                commits_by_day_aggregate[k] += 1
 
            else:
 
                commits_by_day_aggregate[k] = 1
 

	
 
        overview_data = sorted(commits_by_day_aggregate.items(),
 
                               key=itemgetter(0))
 

	
 
        if not co_day_auth_aggr:
 
            co_day_auth_aggr[akc(repo.contact)] = {
 
                "label": akc(repo.contact),
 
                "data": [0, 1],
 
                "schema": ["commits"],
 
            }
 

	
 
        stats = cur_stats if cur_stats else Statistics()
 
        stats.commit_activity = json.dumps(co_day_auth_aggr)
 
        stats.commit_activity_combined = json.dumps(overview_data)
 

	
 
        log.debug('last revision %s', last_rev)
 
        leftovers = len(repo.revisions[last_rev:])
 
        log.debug('revisions to parse %s', leftovers)
 

	
 
        if last_rev == 0 or leftovers < parse_limit:
 
@@ -300,48 +300,49 @@ def send_email(recipients, subject, body
 
              "recipients: %s\n"
 
              "headers: %s\n"
 
              "subject: %s\n"
 
              "body:\n%s\n"
 
              "html:\n%s\n"
 
              % (' '.join(recipients), headers, subject, body, html_body))
 

	
 
    if mail_server:
 
        log.debug("Sending e-mail. " + logmsg)
 
    else:
 
        log.error("SMTP mail server not configured - cannot send e-mail.")
 
        log.warning(logmsg)
 
        return False
 

	
 
    try:
 
        m = SmtpMailer(envelope_from, user, passwd, mail_server, smtp_auth,
 
                       mail_port, ssl, tls, debug=debug)
 
        m.send(recipients, subject, body, html_body, headers=headers)
 
    except:
 
        log.error('Mail sending failed')
 
        log.error(traceback.format_exc())
 
        return False
 
    return True
 

	
 

	
 
@celerylib.task
 
@celerylib.dbsession
 
def create_repo(form_data, cur_user):
 
    from kallithea.model.repo import RepoModel
 
    from kallithea.model.db import Setting
 

	
 
    DBS = celerylib.get_session()
 

	
 
    cur_user = User.guess_instance(cur_user)
 

	
 
    owner = cur_user
 
    repo_name = form_data['repo_name']
 
    repo_name_full = form_data['repo_name_full']
 
    repo_type = form_data['repo_type']
 
    description = form_data['repo_description']
 
    private = form_data['repo_private']
 
    clone_uri = form_data.get('clone_uri')
 
    repo_group = form_data['repo_group']
 
    landing_rev = form_data['repo_landing_rev']
 
    copy_fork_permissions = form_data.get('copy_permissions')
 
    copy_group_permissions = form_data.get('repo_copy_permissions')
 
    fork_of = form_data.get('fork_parent_id')
 
    state = form_data.get('repo_state', Repository.STATE_PENDING)
 

	
 
@@ -450,49 +451,49 @@ def create_repo_fork(form_data, cur_user
 
                      fork_of.repo_name, '')
 
        DBS.commit()
 

	
 
        source_repo_path = os.path.join(base_path, fork_of.repo_name)
 

	
 
        # now create this repo on Filesystem
 
        RepoModel()._create_filesystem_repo(
 
            repo_name=repo_name,
 
            repo_type=repo_type,
 
            repo_group=RepoGroup.guess_instance(repo_group),
 
            clone_uri=source_repo_path,
 
        )
 
        repo = Repository.get_by_repo_name(repo_name_full)
 
        log_create_repository(repo.get_dict(), created_by=owner.username)
 

	
 
        # update repo changeset caches initially
 
        repo.update_changeset_cache()
 

	
 
        # set new created state
 
        repo.set_state(Repository.STATE_CREATED)
 
        DBS.commit()
 
    except Exception as e:
 
        log.warning('Exception %s occurred when forking repository, '
 
                    'doing cleanup...' % e)
 
        #rollback things manually !
 
        # rollback things manually !
 
        repo = Repository.get_by_repo_name(repo_name_full)
 
        if repo:
 
            Repository.delete(repo.repo_id)
 
            DBS.commit()
 
            RepoModel()._delete_filesystem_repo(repo)
 
        raise
 

	
 
    return True
 

	
 

	
 
def __get_codes_stats(repo_name):
 
    from kallithea.config.conf import LANGUAGES_EXTENSIONS_MAP
 
    repo = Repository.get_by_repo_name(repo_name).scm_instance
 

	
 
    tip = repo.get_changeset()
 
    code_stats = {}
 

	
 
    def aggregate(cs):
 
        for f in cs[2]:
 
            ext = lower(f.extension)
 
            if ext in LANGUAGES_EXTENSIONS_MAP.keys() and not f.is_binary:
 
                if ext in code_stats:
 
                    code_stats[ext] += 1
 
                else:
kallithea/lib/celerypylons/__init__.py
Show inline comments
 
@@ -6,49 +6,51 @@ Kallithea wrapper of Celery
 
The Celery configuration is in the Kallithea ini file but must be converted to an
 
entirely different format before Celery can use it.
 

	
 
We read the configuration from tg.config at module import time. This module can
 
thus not be imported in global scope but must be imported on demand in function
 
scope after tg.config has been initialized.
 

	
 
To make sure that the config really has been initialized, we check one of the
 
mandatory settings.
 
"""
 

	
 
import os
 
import warnings
 

	
 
import tg
 
import celery
 

	
 

	
 
def celery_config(config):
 
    """Return Celery config object populated from relevant settings in a config dict, such as tg.config"""
 

	
 
    # Verify .ini file configuration has been loaded
 
    assert config['celery.imports'] == 'kallithea.lib.celerylib.tasks', 'Kallithea Celery configuration has not been loaded'
 

	
 
    class CeleryConfig(object): pass
 
    class CeleryConfig(object):
 
        pass
 

	
 
    celery_config = CeleryConfig()
 

	
 
    PREFIXES = """ADMINS BROKER CASSANDRA CELERYBEAT CELERYD CELERYMON CELERY EMAIL SERVER""".split()
 
    LIST_PARAMS = """CELERY_IMPORTS ADMINS ROUTES CELERY_ACCEPT_CONTENT""".split()
 

	
 
    for config_key, config_value in sorted(config.items()):
 
        celery_key = config_key.replace('.', '_').upper()
 
        if celery_key.split('_', 1)[0] not in PREFIXES:
 
            continue
 
        if not isinstance(config_value, basestring):
 
            continue
 
        if celery_key in LIST_PARAMS:
 
            celery_value = config_value.split()
 
        elif config_value.isdigit():
 
            celery_value = int(config_value)
 
        elif config_value.lower() in ['true', 'false']:
 
            celery_value = config_value.lower() == 'true'
 
        else:
 
            celery_value = config_value
 
        setattr(celery_config, celery_key, celery_value)
 
    return celery_config
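A worked example of the key conversion above, using invented ini values:

config = {
    'celery.imports': 'kallithea.lib.celerylib.tasks',
    'broker.url': 'amqp://guest@localhost//',
    'celeryd.concurrency': '2',
    'celery.always.eager': 'false',
    'use_gravatar': 'true',  # no recognized prefix - ignored
}
cc = celery_config(config)
# cc.BROKER_URL == 'amqp://guest@localhost//'              (string kept as-is)
# cc.CELERYD_CONCURRENCY == 2                              ('2'.isdigit() -> int)
# cc.CELERY_ALWAYS_EAGER is False                          ('false' -> bool)
# cc.CELERY_IMPORTS == ['kallithea.lib.celerylib.tasks']   (LIST_PARAMS -> split())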
 

	
 

	
 
# Create celery app from the TurboGears configuration file
kallithea/lib/compat.py
Show inline comments
 
@@ -34,67 +34,69 @@ import functools
 
#==============================================================================
 
# json
 
#==============================================================================
 
from kallithea.lib.ext_json import json
 

	
 
# alias for formatted json
 
formatted_json = functools.partial(json.dumps, indent=4, sort_keys=True)
 

	
 

	
 
#==============================================================================
 
# unittest
 
#==============================================================================
 
if sys.version_info >= (2, 7):
 
    import unittest
 
else:
 
    import unittest2 as unittest
 

	
 

	
 
#==============================================================================
 
# OrderedDict - Python 2.7 could perhaps use collections.OrderedDict
 
#==============================================================================
 

	
 
# Python Software Foundation License
 

	
 

	
 
# XXX: it feels like using the class with "is" and "is not" instead of "==" and
 
# "!=" should be faster.
 
class _Nil(object):
 

	
 
    def __repr__(self):
 
        return "nil"
 

	
 
    def __eq__(self, other):
 
        if (isinstance(other, _Nil)):
 
            return True
 
        else:
 
            return NotImplemented
 

	
 
    def __ne__(self, other):
 
        if (isinstance(other, _Nil)):
 
            return False
 
        else:
 
            return NotImplemented
 

	
 

	
 
_nil = _Nil()
 

	
 

	
 
class OrderedDict(dict):
 
    """Ordered dict data structure, with O(1) complexity for dict operations
 
    that modify one element.
 

	
 
    Overwriting values doesn't change their original sequential order.
 
    """
 

	
 
    def __init__(self, data=(), **kwds):
 
        """This doesn't accept keyword initialization as normal dicts to avoid
 
        a trap - inside a function or method the keyword args are accessible
 
        only as a dict, without a defined order, so their original order is
 
        lost.
 
        """
 
        if kwds:
 
            raise TypeError("__init__() of ordered dict takes no keyword "
 
                            "arguments to avoid an ordering trap.")
 
        dict.__init__(self)
 
        # If you give a normal dict, then the order of elements is undefined
 
        if hasattr(data, "iteritems"):
 
            for key, val in data.iteritems():
 
                self[key] = val
kallithea/lib/db_manage.py
Show inline comments
 
@@ -63,49 +63,49 @@ def notify(msg):
 

	
 

	
 
class DbManage(object):
 
    def __init__(self, dbconf, root, tests=False, SESSION=None, cli_args=None):
 
        self.dbname = dbconf.split('/')[-1]
 
        self.tests = tests
 
        self.root = root
 
        self.dburi = dbconf
 
        self.db_exists = False
 
        self.cli_args = cli_args or {}
 
        self.init_db(SESSION=SESSION)
 

	
 
    def _ask_ok(self, msg):
 
        """Invoke ask_ok unless the force_ask option provides the answer"""
 
        force_ask = self.cli_args.get('force_ask')
 
        if force_ask is not None:
 
            return force_ask
 
        from kallithea.lib.paster_commands.common import ask_ok
 
        return ask_ok(msg)
 

	
 
    def init_db(self, SESSION=None):
 
        if SESSION:
 
            self.sa = SESSION
 
        else:
 
            #init new sessions
 
            # init new sessions
 
            engine = create_engine(self.dburi)
 
            init_model(engine)
 
            self.sa = Session()
 

	
 
    def create_tables(self, override=False):
 
        """
 
        Create an auth database
 
        """
 

	
 
        log.info("Any existing database is going to be destroyed")
 
        if self.tests:
 
            destroy = True
 
        else:
 
            destroy = self._ask_ok('Are you sure you want to destroy the old database? [y/n]')
 
        if not destroy:
 
            print 'Nothing done.'
 
            sys.exit(0)
 
        if destroy:
 
            # drop and re-create old schemas
 

	
 
            url = sqlalchemy.engine.url.make_url(self.dburi)
 
            database = url.database
 

	
 
            # Some databases enforce foreign key constraints and Base.metadata.drop_all() doesn't work
 
@@ -184,78 +184,78 @@ class DbManage(object):
 
            import getpass
 

	
 
            username = self.cli_args.get('username')
 
            password = self.cli_args.get('password')
 
            email = self.cli_args.get('email')
 

	
 
            def get_password():
 
                password = getpass.getpass('Specify admin password '
 
                                           '(min 6 chars):')
 
                confirm = getpass.getpass('Confirm password:')
 

	
 
                if password != confirm:
 
                    log.error('passwords mismatch')
 
                    return False
 
                if len(password) < 6:
 
                    log.error('password is too short, use at least 6 characters')
 
                    return False
 

	
 
                return password
 
            if username is None:
 
                username = raw_input('Specify admin username:')
 
            if password is None:
 
                password = get_password()
 
                if not password:
 
                    #second try
 
                    # second try
 
                    password = get_password()
 
                    if not password:
 
                        sys.exit()
 
            if email is None:
 
                email = raw_input('Specify admin email:')
 
            self.create_user(username, password, email, True)
 
        else:
 
            log.info('creating admin and regular test users')
 
            from kallithea.tests.base import TEST_USER_ADMIN_LOGIN, \
 
            TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \
 
            TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \
 
            TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \
 
            TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL
 

	
 
            self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,
 
                             TEST_USER_ADMIN_EMAIL, True)
 

	
 
            self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
 
                             TEST_USER_REGULAR_EMAIL, False)
 

	
 
            self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS,
 
                             TEST_USER_REGULAR2_EMAIL, False)
 

	
 
    def create_ui_settings(self, repo_store_path):
 
        """
 
        Creates ui settings, fills out hooks
 
        """
 

	
 
        #HOOKS
 
        # HOOKS
 
        hooks1_key = Ui.HOOK_UPDATE
 
        hooks1_ = Ui.query() \
 
            .filter(Ui.ui_key == hooks1_key).scalar()
 

	
 
        hooks1 = Ui() if hooks1_ is None else hooks1_
 
        hooks1.ui_section = 'hooks'
 
        hooks1.ui_key = hooks1_key
 
        hooks1.ui_value = 'hg update >&2'
 
        hooks1.ui_active = False
 
        self.sa.add(hooks1)
 

	
 
        hooks2_key = Ui.HOOK_REPO_SIZE
 
        hooks2_ = Ui.query() \
 
            .filter(Ui.ui_key == hooks2_key).scalar()
 
        hooks2 = Ui() if hooks2_ is None else hooks2_
 
        hooks2.ui_section = 'hooks'
 
        hooks2.ui_key = hooks2_key
 
        hooks2.ui_value = 'python:kallithea.lib.hooks.repo_size'
 
        self.sa.add(hooks2)
 

	
 
        hooks3 = Ui()
 
        hooks3.ui_section = 'hooks'
 
        hooks3.ui_key = Ui.HOOK_PUSH
 
        hooks3.ui_value = 'python:kallithea.lib.hooks.log_push_action'
kallithea/lib/dbmigrate/__init__.py
Show inline comments
 
from gearbox.command import Command
 

	
 

	
 
class UpgradeDb(Command):
 
    '''(removed)'''
 

	
 
    deprecated = True
 

	
 
    def run(self, args):
 
        raise SystemExit(
 
            'The "paster upgrade-db" command has been removed; please see the docs:\n'
 
            '    https://kallithea.readthedocs.io/en/default/upgrade.html'
 
        )
kallithea/lib/diffs.py
Show inline comments
 
@@ -109,48 +109,49 @@ def get_gitdiff(filenode_old, filenode_n
 
    Returns git style diff between given ``filenode_old`` and ``filenode_new``.
 

	
 
    :param ignore_whitespace: ignore whitespaces in diff
 
    """
 
    # make sure we pass in default context
 
    context = context or 3
 
    submodules = filter(lambda o: isinstance(o, SubModuleNode),
 
                        [filenode_new, filenode_old])
 
    if submodules:
 
        return ''
 

	
 
    for filenode in (filenode_old, filenode_new):
 
        if not isinstance(filenode, FileNode):
 
            raise VCSError("Given object should be FileNode object, not %s"
 
                % filenode.__class__)
 

	
 
    repo = filenode_new.changeset.repository
 
    old_raw_id = getattr(filenode_old.changeset, 'raw_id', repo.EMPTY_CHANGESET)
 
    new_raw_id = getattr(filenode_new.changeset, 'raw_id', repo.EMPTY_CHANGESET)
 

	
 
    vcs_gitdiff = repo.get_diff(old_raw_id, new_raw_id, filenode_new.path,
 
                                ignore_whitespace, context)
 
    return vcs_gitdiff
 

	
 

	
 
NEW_FILENODE = 1
 
DEL_FILENODE = 2
 
MOD_FILENODE = 3
 
RENAMED_FILENODE = 4
 
COPIED_FILENODE = 5
 
CHMOD_FILENODE = 6
 
BIN_FILENODE = 7
 

	
 

	
 
class DiffLimitExceeded(Exception):
 
    pass
 

	
 

	
 
class LimitedDiffContainer(object):
 

	
 
    def __init__(self, diff_limit, cur_diff_size, diff):
 
        self.diff = diff
 
        self.diff_limit = diff_limit
 
        self.cur_diff_size = cur_diff_size
 

	
 
    def __iter__(self):
 
        for l in self.diff:
 
            yield l
 

	
 
@@ -184,49 +185,48 @@ class DiffProcessor(object):
 
        # has already been split on this:
 
        # ^diff[ ]--git
 
            [ ]a/(?P<a_path>.+?)[ ]b/(?P<b_path>.+?)\n
 
        (?:^old[ ]mode[ ](?P<old_mode>\d+)\n
 
           ^new[ ]mode[ ](?P<new_mode>\d+)(?:\n|$))?
 
        (?:^similarity[ ]index[ ](?P<similarity_index>\d+)%(?:\n|$))?
 
        (?:^rename[ ]from[ ](?P<rename_from>.+)\n
 
           ^rename[ ]to[ ](?P<rename_to>.+)(?:\n|$))?
 
        (?:^copy[ ]from[ ](?P<copy_from>.+)\n
 
           ^copy[ ]to[ ](?P<copy_to>.+)(?:\n|$))?
 
        (?:^new[ ]file[ ]mode[ ](?P<new_file_mode>.+)(?:\n|$))?
 
        (?:^deleted[ ]file[ ]mode[ ](?P<deleted_file_mode>.+)(?:\n|$))?
 
        (?:^index[ ](?P<a_blob_id>[0-9A-Fa-f]+)
 
            \.\.(?P<b_blob_id>[0-9A-Fa-f]+)[ ]?(?P<b_mode>.+)?(?:\n|$))?
 
        (?:^(?P<bin_patch>GIT[ ]binary[ ]patch)(?:\n|$))?
 
        (?:^---[ ](a/(?P<a_file>.+?)|/dev/null)\t?(?:\n|$))?
 
        (?:^\+\+\+[ ](b/(?P<b_file>.+?)|/dev/null)\t?(?:\n|$))?
 
    """, re.VERBOSE | re.MULTILINE)
 

	
 
    # Used for inline highlighter word split, must match the substitutions in _escaper
 
    _token_re = re.compile(r'()(&amp;|&lt;|&gt;|<u>\t</u>|<u class="cr"></u>| <i></i>|\W+?)')
 

	
 
    _escape_re = re.compile(r'(&)|(<)|(>)|(\t)|(\r)|(?<=.)( \n| $)')
 

	
 

	
 
    def __init__(self, diff, vcs='hg', format='gitdiff', diff_limit=None):
 
        """
 
        :param diff:   a text in diff format
 
        :param vcs: type of version control hg or git
 
        :param format: format of diff passed, `udiff` or `gitdiff`
 
        :param diff_limit: the size of diff at which it is considered "big";
 
            beyond that size the cut off is triggered. Set to None to always
 
            show the full diff
 
        """
 
        if not isinstance(diff, basestring):
 
            raise Exception('Diff must be a basestring got %s instead' % type(diff))
 

	
 
        self._diff = diff
 
        self._format = format
 
        self.adds = 0
 
        self.removes = 0
 
        # calculate diff size
 
        self.diff_size = len(diff)
 
        self.diff_limit = diff_limit
 
        self.cur_diff_size = 0
 
        self.parsed = False
 
        self.parsed_diff = []
 
        self.vcs = vcs
 

	
 
@@ -354,57 +354,57 @@ class DiffProcessor(object):
 

	
 
            a_path, b_path, similarity_index, rename_from, rename_to,
 
            old_mode, new_mode, new_file_mode, deleted_file_mode,
 
            a_blob_id, b_blob_id, b_mode, a_file, b_file
 

	
 
        :param diff_chunk:
 
        """
 

	
 
        match = None
 
        if self.vcs == 'git':
 
            match = self._git_header_re.match(diff_chunk)
 
        elif self.vcs == 'hg':
 
            match = self._hg_header_re.match(diff_chunk)
 
        if match is None:
 
            raise Exception('diff not recognized as valid %s diff' % self.vcs)
 
        groups = match.groupdict()
 
        rest = diff_chunk[match.end():]
 
        if rest and not rest.startswith('@') and not rest.startswith('literal ') and not rest.startswith('delta '):
 
            raise Exception('cannot parse %s diff header: %r followed by %r' % (self.vcs, diff_chunk[:match.end()], rest[:1000]))
 
        difflines = imap(self._escaper, re.findall(r'.*\n|.+$', rest)) # don't split on \r as str.splitlines do
 
        return groups, difflines
 

	
 
    def _clean_line(self, line, command):
 
        if command in ['+', '-', ' ']:
 
            #only modify the line if it's actually a diff thing
 
            # only modify the line if it's actually a diff thing
 
            line = line[1:]
 
        return line
 

	
 
    def _parse_gitdiff(self, inline_diff=True):
 
        _files = []
 
        diff_container = lambda arg: arg
 

	
 
        ##split the diff in chunks of separate --git a/file b/file chunks
 
        # split the diff in chunks of separate --git a/file b/file chunks
 
        for raw_diff in ('\n' + self._diff).split('\ndiff --git')[1:]:
 
            head, diff = self._get_header(raw_diff)
 

	
 
            op = None
 
            stats = {
 
                'added': 0,
 
                'deleted': 0,
 
                'binary': False,
 
                'ops': {},
 
            }
 

	
 
            if head['deleted_file_mode']:
 
                op = 'D'
 
                stats['binary'] = True
 
                stats['ops'][DEL_FILENODE] = 'deleted file'
 

	
 
            elif head['new_file_mode']:
 
                op = 'A'
 
                stats['binary'] = True
 
                stats['ops'][NEW_FILENODE] = 'new file %s' % head['new_file_mode']
 
            else:  # modify operation, can be cp, rename, chmod
 
                # CHMOD
 
                if head['new_mode'] and head['old_mode']:
 
                    op = 'M'
kallithea/lib/ext_json.py
Show inline comments
 
"""
 
Extended JSON encoder for json
 

	
 
json.org does not specify how date time can be represented - monkeypatch it to do something.
 
"""
 

	
 
import datetime
 
import functools
 
import decimal
 
import json # is re-exported after monkey patching
 

	
 
__all__ = ['json']
 

	
 

	
 
def _is_tz_aware(value):
 
    """
 
    Determines if a given datetime.time is timezone aware.
 

	
 
    The logic is described in Python's docs:
 
    http://docs.python.org/library/datetime.html#datetime.tzinfo
 
    """
 
    return (value.tzinfo is not None
 
            and value.tzinfo.utcoffset(value) is not None)
 

	
 

	
 
def _obj_dump(obj):
 
    """
 
    Custom function for dumping objects to JSON; if obj has a __json__ attribute
 
    or method defined, it will be used for serialization
 

	
 
    :param obj:
 
    """
 

	
 
    if isinstance(obj, complex):
 
        return [obj.real, obj.imag]
 
    # See "Date Time String Format" in the ECMA-262 specification.
 
    # some code borrowed from django 1.4
 
    elif isinstance(obj, datetime.datetime):
 
        r = obj.isoformat()
 
        if obj.microsecond:
 
            r = r[:23] + r[26:]
 
        if r.endswith('+00:00'):
 
            r = r[:-6] + 'Z'
 
        return r
 
    elif isinstance(obj, datetime.date):
 
        return obj.isoformat()
 
    elif isinstance(obj, decimal.Decimal):
 
        return str(obj)
 
    elif isinstance(obj, datetime.time):
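A worked example (values invented) of the datetime branch shown above:

# datetime.datetime(2017, 8, 25, 14, 32, 50, 123456).isoformat()
#     -> '2017-08-25T14:32:50.123456'
# r[:23] + r[26:] truncates microseconds to milliseconds:
#     -> '2017-08-25T14:32:50.123'
# with a UTC timezone attached, the trailing '+00:00' becomes 'Z':
#     -> '2017-08-25T14:32:50.123Z'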
kallithea/lib/graphmod.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
Modified Mercurial DAG graph functions that re-use the VCS structure
 

	
 
It makes it possible to share the DAG generation codebase between hg and git repos
 
"""
 

	
 
nullrev = -1
 

	
 

	
 
def _first_known_ancestors(parentrev_func, minrev, knownrevs, head):
 
    """
 
    Return the apparent parents of the head revision in a filtered DAG.
 
    This is like calling plain parentrev_func, except that if the parent has
 
    been filtered (isn't in knownrevs), recurse on its parents.
 
    For ancestors that are unknown in knownrevs, the revision number is negated.
 
    (This means that a Mercurial revision can have more than 2 "parents".)
 

	
 
    parentrev_func defines the full DAG.
 
    knownrevs contains the subset we care about. minrev is lower bound on knownrevs.
 
    """
 
    pending = set([head])
 
    seen = set()
 
    ancestors = set()
 
    while pending:
 
        r = pending.pop()
 
        if r < minrev:
 
            if r > nullrev: # ignore -1 returned from parentrev_func
 
                ancestors.add(-head) # fake unique unknown parent for this rev
 
        elif r in knownrevs:
 
            ancestors.add(r)
 
        elif r not in seen:
 
            pending.update(parentrev_func(r))
 
            seen.add(r)
 
    return ancestors
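A minimal sketch of the walk above on an invented linear history 0-1-2-3-4 where only revisions 0, 2 and 4 are shown:

parents = {4: [3], 3: [2], 2: [1], 1: [0], 0: [nullrev]}
# rev 4's real parent 3 is filtered out, so the walk continues past it and
# reports 2 as the first known ancestor
assert _first_known_ancestors(lambda rev: parents[rev], 0, set([0, 2, 4]), 3) == set([2])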
 

	
 

	
 
def graph_data(repo, revs):
 
    """Return a DAG with colored edge information for revs
 

	
 
    For each DAG node this function emits tuples::
 

	
 
      ((col, color), [(col, nextcol, color)])
 

	
 
    with the following new elements:
 

	
 
      - Tuple (col, color) with column and color index for the current node
 
      - A list of tuples indicating the edges between the current node and its
 
        parents.
 
    """
 
    dag = _dagwalker(repo, revs)
 
    return list(_colored(repo, dag))
 

	
 

	
 
def _dagwalker(repo, revs):
 
    """Iterate over revs, yielding revs (highest first) and parents to show in the graph."""
 
    if not revs:
 
        return
 

	
 
    if repo.alias == 'hg':
 
        parentrev_func = repo._repo.changelog.parentrevs
 
    elif repo.alias == 'git':
 
        def parentrev_func(rev):
 
            return [x.revision for x in repo[rev].parents]
 

	
 
    minrev = revs[-1] # assuming sorted with highest revision numbers first
 
    knownrevs = set(revs)
 
    acache = {}
 
    for rev in revs:
 
        parents = set(parentrev_func(rev)) - set([nullrev])
 
        dagparents = parents & knownrevs
 
        # Calculate indirect parents
 
        for p in parents - dagparents:
 
            ancestors = acache.get(p)
 
            if ancestors is None:
 
                ancestors = acache[p] = _first_known_ancestors(parentrev_func, minrev, knownrevs, p)
 
            dagparents.update(ancestors)
 

	
 
        yield (rev, sorted(dagparents))
 

	
 

	
 
def _colored(repo, dag):
 
    """annotates a DAG with colored edge information
 

	
 
    For each DAG node this function emits tuples::
 

	
 
      ((col, color), [(col, nextcol, color)])
 

	
 
    with the following new elements:
 

	
 
      - Tuple (col, color) with column and color index for the current node
 
      - A list of tuples indicating the edges between the current node and its
 
        parents.
 
    """
 
    branch_cache = {}
 

	
 
    def branch(rev):
 
        """Return branch for rev, using cache for efficiency.
 
        For Mercurial, always return the named branch name (which may be 'default').
 
        For Git, return a branch name for branch heads, otherwise None."""
 
        if rev not in branch_cache:
 
            branch_cache[rev] = repo[rev].branch
 
        return branch_cache[rev]
 

	
 
    row = []  # the ancestor revision that each column is tracking
 
    colors = {}  # color number for revisions - set by descendants
 
    obs = {}  # obsolete flag for revisions - set by descendants
 
    newcolor = 1  # the next available color
 

	
 
    for (rev, dagparents) in dag:
 

	
 
        # Compute row and nextrow
 
        if rev not in row:
 
            row.append(rev)  # new head
 
            colors[rev] = newcolor
 
            obs[rev] = int(repo[rev].obsolete)
 
            newcolor += 1
 

	
 
        col = row.index(rev)
 
        addparents = [p for p in dagparents if p not in row]
kallithea/lib/helpers.py
Show inline comments
 
@@ -43,174 +43,179 @@ from webhelpers.html.tags import _set_in
 
from kallithea.config.routing import url
 
from kallithea.lib.annotate import annotate_highlight
 
from kallithea.lib.pygmentsutils import get_custom_lexer
 
from kallithea.lib.utils2 import str2bool, safe_unicode, safe_str, \
 
    time_to_datetime, AttributeDict, safe_int, MENTIONS_REGEX
 
from kallithea.lib.markup_renderer import url_re
 
from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError
 
from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def canonical_url(*args, **kargs):
 
    '''Like url(x, qualified=True), but returns url that not only is qualified
 
    but also canonical, as configured in canonical_url'''
 
    from kallithea import CONFIG
 
    try:
 
        parts = CONFIG.get('canonical_url', '').split('://', 1)
 
        kargs['host'] = parts[1].split('/', 1)[0]
 
        kargs['protocol'] = parts[0]
 
    except IndexError:
 
        kargs['qualified'] = True
 
    return url(*args, **kargs)
 

	
 

	
 
def canonical_hostname():
 
    '''Return canonical hostname of system'''
 
    from kallithea import CONFIG
 
    try:
 
        parts = CONFIG.get('canonical_url', '').split('://', 1)
 
        return parts[1].split('/', 1)[0]
 
    except IndexError:
 
        parts = url('home', qualified=True).split('://', 1)
 
        return parts[1].split('/', 1)[0]
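A worked example of the parsing above, assuming an invented canonical_url setting:

# CONFIG['canonical_url'] = 'https://kallithea.example.com/prefix'
# 'https://kallithea.example.com/prefix'.split('://', 1)
#     -> ['https', 'kallithea.example.com/prefix']
# canonical_hostname()  -> 'kallithea.example.com'
# canonical_url('home') -> the 'home' route rendered with protocol='https'
#                          and host='kallithea.example.com'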
 

	
 

	
 
def html_escape(s):
 
    """Return string with all html escaped.
 
    This is also safe for javascript in html but not necessarily correct.
 
    """
 
    return (s
 
        .replace('&', '&amp;')
 
        .replace(">", "&gt;")
 
        .replace("<", "&lt;")
 
        .replace('"', "&quot;")
 
        .replace("'", "&apos;")
 
        )
 

	
 
def js(value):
 
    """Convert Python value to the corresponding JavaScript representation.
 

	
 
    This is necessary to safely insert arbitrary values into HTML <script>
 
    sections e.g. using Mako template expression substitution.
 

	
 
    Note: Rather than using this function, it's preferable to avoid the
 
    insertion of values into HTML <script> sections altogether. Instead,
 
    data should (to the extent possible) be passed to JavaScript using
 
    data attributes or AJAX calls, eliminating the need for JS specific
 
    escaping.
 

	
 
    Note: This is not safe for use in attributes (e.g. onclick), because
 
    quotes are not escaped.
 

	
 
    Because the rules for parsing <script> vary between XHTML (where
 
    normal rules apply for any special characters) and HTML (where
 
    entities are not interpreted, but the literal string "</script>"
 
    is forbidden), the function ensures that the result never contains
 
    '&', '<' and '>', thus making it safe in both those contexts (but
 
    not in attributes).
 
    """
 
    return literal(
 
        ('(' + json.dumps(value) + ')')
 
        # In JSON, the following can only appear in string literals.
 
        .replace('&', r'\x26')
 
        .replace('<', r'\x3c')
 
        .replace('>', r'\x3e')
 
    )
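An illustrative example of the escaping above (input invented, output written out by hand):

# js('</script>')
#     json.dumps gives       '"</script>"'
#     wrapping and escaping  ('("\x3c/script\x3e")')
# the result contains no literal '&', '<' or '>', so it can never terminate the
# surrounding <script> element; the JS parser decodes \x3c back to '<'.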
 

	
 

	
 
def jshtml(val):
 
    """HTML escapes a string value, then converts the resulting string
 
    to its corresponding JavaScript representation (see `js`).
 

	
 
    This is used when a plain-text string (possibly containing special
 
    HTML characters) will be used by a script in an HTML context (e.g.
 
    element.innerHTML or jQuery's 'html' method).
 

	
 
    If in doubt, err on the side of using `jshtml` over `js`, since it's
 
    better to escape too much than too little.
 
    """
 
    return js(escape(val))
 

	
 

	
 
def shorter(s, size=20, firstline=False, postfix='...'):
 
    """Truncate s to size, including the postfix string if truncating.
 
    If firstline, truncate at newline.
 
    """
 
    if firstline:
 
        s = s.split('\n', 1)[0].rstrip()
 
    if len(s) > size:
 
        return s[:size - len(postfix)] + postfix
 
    return s
 

	
 

	
 
def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
 
    """
 
    Reset button
 
    """
 
    _set_input_attrs(attrs, type, name, value)
 
    _set_id_attr(attrs, id, name)
 
    convert_boolean_attrs(attrs, ["disabled"])
 
    return HTML.input(**attrs)
 

	
 

	
 
reset = _reset
 
safeid = _make_safe_id_component
 

	
 

	
 
def FID(raw_id, path):
 
    """
 
    Creates a unique ID for a filenode based on a hash of its path and revision;
 
    it's safe to use in URLs
 

	
 
    :param raw_id:
 
    :param path:
 
    """
 

	
 
    return 'C-%s-%s' % (short_id(raw_id), hashlib.md5(safe_str(path)).hexdigest()[:12])
 

	
 

	
 
class _FilesBreadCrumbs(object):
 

	
 
    def __call__(self, repo_name, rev, paths):
 
        if isinstance(paths, str):
 
            paths = safe_unicode(paths)
 
        url_l = [link_to(repo_name, url('files_home',
 
                                        repo_name=repo_name,
 
                                        revision=rev, f_path=''),
 
                         class_='ypjax-link')]
 
        paths_l = paths.split('/')
 
        for cnt, p in enumerate(paths_l):
 
            if p != '':
 
                url_l.append(link_to(p,
 
                                     url('files_home',
 
                                         repo_name=repo_name,
 
                                         revision=rev,
 
                                         f_path='/'.join(paths_l[:cnt + 1])
 
                                         ),
 
                                     class_='ypjax-link'
 
                                     )
 
                             )
 

	
 
        return literal('/'.join(url_l))
 

	
 

	
 
files_breadcrumbs = _FilesBreadCrumbs()
 

	
 

	
 
class CodeHtmlFormatter(HtmlFormatter):
 
    """
 
    Custom HTML formatter for source code
 
    """
 

	
 
    def wrap(self, source, outfile):
 
        return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
 

	
 
    def _wrap_code(self, source):
 
        for cnt, it in enumerate(source):
 
            i, t = it
 
            t = '<span id="L%s">%s</span>' % (cnt + 1, t)
 
            yield i, t
 

	
 
    def _wrap_tablelinenos(self, inner):
 
        dummyoutfile = StringIO.StringIO()
 
        lncount = 0
 
        for t, line in inner:
 
            if t:
 
                lncount += 1
 
            dummyoutfile.write(line)
 
@@ -251,58 +256,61 @@ class CodeHtmlFormatter(HtmlFormatter):
 
                        lines.append('%*d' % (mw, i))
 
                else:
 
                    lines.append('')
 
            ls = '\n'.join(lines)
 

	
 
        # in case you wonder about the seemingly redundant <div> here: since the
 
        # content in the other cell also is wrapped in a div, some browsers in
 
        # some configurations seem to mess up the formatting...
 
        if nocls:
 
            yield 0, ('<table class="%stable">' % self.cssclass +
 
                      '<tr><td><div class="linenodiv">'
 
                      '<pre>' + ls + '</pre></div></td>'
 
                      '<td id="hlcode" class="code">')
 
        else:
 
            yield 0, ('<table class="%stable">' % self.cssclass +
 
                      '<tr><td class="linenos"><div class="linenodiv">'
 
                      '<pre>' + ls + '</pre></div></td>'
 
                      '<td id="hlcode" class="code">')
 
        yield 0, dummyoutfile.getvalue()
 
        yield 0, '</td></tr></table>'
 

	
 

	
 
_whitespace_re = re.compile(r'(\t)|( )(?=\n|</div>)')
 

	
 

	
 
def _markup_whitespace(m):
 
    groups = m.groups()
 
    if groups[0]:
 
        return '<u>\t</u>'
 
    if groups[1]:
 
        return ' <i></i>'
 

	
 

	
 
def markup_whitespace(s):
 
    return _whitespace_re.sub(_markup_whitespace, s)
 

	
 

	
 
def pygmentize(filenode, **kwargs):
 
    """
 
    pygmentize function using pygments
 

	
 
    :param filenode:
 
    """
 
    lexer = get_custom_lexer(filenode.extension) or filenode.lexer
 
    return literal(markup_whitespace(
 
        code_highlight(filenode.content, lexer, CodeHtmlFormatter(**kwargs))))
 

	
 

	
 
def pygmentize_annotation(repo_name, filenode, **kwargs):
 
    """
 
    pygmentize function for annotation
 

	
 
    :param filenode:
 
    """
 

	
 
    color_dict = {}
 

	
 
    def gen_color(n=10000):
 
        """generator for getting n of evenly distributed colors using
 
        HSV color and the golden ratio. It always returns the same order of colors
 

	
 
@@ -378,88 +386,90 @@ def pygmentize_annotation(repo_name, fil
 

	
 

	
 
class _Message(object):
 
    """A message returned by ``Flash.pop_messages()``.
 

	
 
    Converting the message to a string returns the message text. Instances
 
    also have the following attributes:
 

	
 
    * ``message``: the message text.
 
    * ``category``: the category specified when the message was created.
 
    """
 

	
 
    def __init__(self, category, message):
 
        self.category = category
 
        self.message = message
 

	
 
    def __str__(self):
 
        return self.message
 

	
 
    __unicode__ = __str__
 

	
 
    def __html__(self):
 
        return escape(safe_unicode(self.message))
 

	
 

	
 
class Flash(_Flash):
 

	
 
    def __call__(self, message, category=None, ignore_duplicate=False, logf=None):
 
        """
 
        Show a message to the user _and_ log it through the specified function
 

	
 
        category: notice (default), warning, error, success
 
        logf: a custom log function - such as log.debug
 

	
 
        logf defaults to log.info, unless category equals 'success', in which
 
        case logf defaults to log.debug.
 
        """
 
        if logf is None:
 
            logf = log.info
 
            if category == 'success':
 
                logf = log.debug
 

	
 
        logf('Flash %s: %s', category, message)
 

	
 
        super(Flash, self).__call__(message, category, ignore_duplicate)
 

	
 
    def pop_messages(self):
 
        """Return all accumulated messages and delete them from the session.
 

	
 
        The return value is a list of ``Message`` objects.
 
        """
 
        from tg import session
 
        messages = session.pop(self.session_key, [])
 
        session.save()
 
        return [_Message(*m) for m in messages]
 

	
 

	
 
flash = Flash()
 

	
 
#==============================================================================
 
# SCM FILTERS available via h.
 
#==============================================================================
 
from kallithea.lib.vcs.utils import author_name, author_email
 
from kallithea.lib.utils2 import credentials_filter, age as _age
 

	
 
age = lambda  x, y=False: _age(x, y)
 
age = lambda x, y=False: _age(x, y)
 
capitalize = lambda x: x.capitalize()
 
email = author_email
 
short_id = lambda x: x[:12]
 
hide_credentials = lambda x: ''.join(credentials_filter(x))
 

	
 

	
 
def show_id(cs):
 
    """
 
    Configurable function that shows the ID;
 
    by default it's r123:fffeeefffeee
 

	
 
    :param cs: changeset instance
 
    """
 
    from kallithea import CONFIG
 
    def_len = safe_int(CONFIG.get('show_sha_length', 12))
 
    show_rev = str2bool(CONFIG.get('show_revision_number', False))
 

	
 
    raw_id = cs.raw_id[:def_len]
 
    if show_rev:
 
        return 'r%s:%s' % (cs.revision, raw_id)
 
    else:
 
        return raw_id
 

	
 

	
 
@@ -478,90 +488,92 @@ def is_git(repository):
 
    else:
 
        _type = repository
 
    return _type == 'git'
 

	
 

	
 
def is_hg(repository):
 
    if hasattr(repository, 'alias'):
 
        _type = repository.alias
 
    elif hasattr(repository, 'repo_type'):
 
        _type = repository.repo_type
 
    else:
 
        _type = repository
 
    return _type == 'hg'
 

	
 

	
 
@cache_region('long_term', 'user_or_none')
 
def user_or_none(author):
 
    """Try to match email part of VCS committer string with a local user - or return None"""
 
    from kallithea.model.db import User
 
    email = author_email(author)
 
    if email:
 
        return User.get_by_email(email, cache=True) # cache will only use sql_cache_short
 
    return None
 

	
 

	
 
def email_or_none(author):
 
    """Try to match email part of VCS committer string with a local user.
 
    Return primary email of user, email part of the specified author name, or None."""
 
    if not author:
 
        return None
 
    user = user_or_none(author)
 
    if user is not None:
 
        return user.email # always use main email address - not necessarily the one used to find user
 

	
 
    # extract email from the commit string
 
    email = author_email(author)
 
    if email:
 
        return email
 

	
 
    # No valid email, not a valid user in the system, none!
 
    return None
 

	
 

	
 
def person(author, show_attr="username"):
 
    """Find the user identified by 'author', return one of the users attributes,
 
    default to the username attribute, None if there is no user"""
 
    from kallithea.model.db import User
 
    # attr to return from fetched user
 
    person_getter = lambda usr: getattr(usr, show_attr)
 

	
 
    # if author is already an instance use it for extraction
 
    if isinstance(author, User):
 
        return person_getter(author)
 

	
 
    user = user_or_none(author)
 
    if user is not None:
 
        return person_getter(user)
 

	
 
    # Still nothing?  Just pass back the author name if any, else the email
 
    return author_name(author) or email(author)
 

	
 

	
 
def person_by_id(id_, show_attr="username"):
 
    from kallithea.model.db import User
 
    # attr to return from fetched user
 
    person_getter = lambda usr: getattr(usr, show_attr)
 

	
 
    #maybe it's an ID ?
 
    # maybe it's an ID ?
 
    if str(id_).isdigit() or isinstance(id_, int):
 
        id_ = int(id_)
 
        user = User.get(id_)
 
        if user is not None:
 
            return person_getter(user)
 
    return id_
 

	
 

	
 
def boolicon(value):
 
    """Returns boolean value of a value, represented as small html image of true/false
 
    icons
 

	
 
    :param value: value
 
    """
 

	
 
    if value:
 
        return HTML.tag('i', class_="icon-ok")
 
    else:
 
        return HTML.tag('i', class_="icon-minus-circled")
 

	
 

	
 
def action_parser(user_log, feed=False, parse_cs=False):
 
    """
 
    This helper will action_map the specified string action into translated
 
@@ -593,49 +605,49 @@ def action_parser(user_log, feed=False, 
 
        def lnk(rev, repo_name):
 
            lazy_cs = False
 
            title_ = None
 
            url_ = '#'
 
            if isinstance(rev, BaseChangeset) or isinstance(rev, AttributeDict):
 
                if rev.op and rev.ref_name:
 
                    if rev.op == 'delete_branch':
 
                        lbl = _('Deleted branch: %s') % rev.ref_name
 
                    elif rev.op == 'tag':
 
                        lbl = _('Created tag: %s') % rev.ref_name
 
                    else:
 
                        lbl = 'Unknown operation %s' % rev.op
 
                else:
 
                    lazy_cs = True
 
                    lbl = rev.short_id[:8]
 
                    url_ = url('changeset_home', repo_name=repo_name,
 
                               revision=rev.raw_id)
 
            else:
 
                # changeset cannot be found - it might have been stripped or removed
 
                lbl = rev[:12]
 
                title_ = _('Changeset %s not found') % lbl
 
            if parse_cs:
 
                return link_to(lbl, url_, title=title_, **{'data-toggle': 'tooltip'})
 
            return link_to(lbl, url_, class_='lazy-cs' if lazy_cs else '',
 
                           **{'data-raw_id':rev.raw_id, 'data-repo_name':repo_name})
 
                           **{'data-raw_id': rev.raw_id, 'data-repo_name': repo_name})
 

	
 
        def _get_op(rev_txt):
 
            _op = None
 
            _name = rev_txt
 
            if len(rev_txt.split('=>')) == 2:
 
                _op, _name = rev_txt.split('=>')
 
            return _op, _name
 

	
 
        revs = []
 
        if len(filter(lambda v: v != '', revs_ids)) > 0:
 
            repo = None
 
            for rev in revs_ids[:revs_top_limit]:
 
                _op, _name = _get_op(rev)
 

	
 
                # we want parsed changesets, or new log store format is bad
 
                if parse_cs:
 
                    try:
 
                        if repo is None:
 
                            repo = user_log.repository.scm_instance
 
                        _rev = repo.get_changeset(rev)
 
                        revs.append(_rev)
 
                    except ChangesetDoesNotExistError:
 
                        log.error('cannot find revision %s in this repo', rev)
 
                        revs.append(rev)
 
@@ -827,75 +839,77 @@ from kallithea.lib.auth import HasPermis
 

	
 

	
 
#==============================================================================
 
# GRAVATAR URL
 
#==============================================================================
 
def gravatar_div(email_address, cls='', size=30, **div_attributes):
 
    """Return an html literal with a span around a gravatar if they are enabled.
 
    Extra keyword parameters starting with 'div_' will get the prefix removed
 
    and '_' changed to '-' and be used as attributes on the div. The default
 
    class is 'gravatar'.
 
    """
 
    from tg import tmpl_context as c
 
    if not c.visual.use_gravatar:
 
        return ''
 
    if 'div_class' not in div_attributes:
 
        div_attributes['div_class'] = "gravatar"
 
    attributes = []
 
    for k, v in sorted(div_attributes.items()):
 
        assert k.startswith('div_'), k
 
        attributes.append(' %s="%s"' % (k[4:].replace('_', '-'), escape(v)))
 
    return literal("""<span%s>%s</span>""" %
 
                   (''.join(attributes),
 
                    gravatar(email_address, cls=cls, size=size)))
 

	
 

	
 
def gravatar(email_address, cls='', size=30):
 
    """return html element of the gravatar
 

	
 
    This method will return an <img> with the resolution double the size (for
 
    retina screens) of the image. If the url returned from gravatar_url is
 
    empty then we fall back to using an icon.
 

	
 
    """
 
    from tg import tmpl_context as c
 
    if not c.visual.use_gravatar:
 
        return ''
 

	
 
    src = gravatar_url(email_address, size * 2)
 

	
 
    if src:
 
        # here it makes sense to use style="width: ..." (instead of, say, a
 
        # stylesheet) because we're using this to generate a high-res (retina) size
 
        html = ('<img alt="" class="{cls}" style="width: {size}px; height: {size}px" src="{src}"/>'
 
            .format(cls=cls, size=size, src=src))
 

	
 
    else:
 
        # if src is empty then there was no gravatar, so we use a font icon
 
        html = ("""<i class="icon-user {cls}" style="font-size: {size}px;"></i>"""
 
            .format(cls=cls, size=size, src=src))
 

	
 
    return literal(html)
 

	
 

	
 
def gravatar_url(email_address, size=30, default=''):
 
    # doh, we need to re-import those to mock it later
 
    from kallithea.config.routing import url
 
    from kallithea.model.db import User
 
    from tg import tmpl_context as c
 
    if not c.visual.use_gravatar:
 
        return ""
 

	
 
    _def = 'anonymous@kallithea-scm.org'  # default gravatar
 
    email_address = email_address or _def
 

	
 
    if email_address == _def:
 
        return default
 

	
 
    parsed_url = urlparse.urlparse(url.current(qualified=True))
 
    url = (c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL ) \
 
               .replace('{email}', email_address) \
 
               .replace('{md5email}', hashlib.md5(safe_str(email_address).lower()).hexdigest()) \
 
               .replace('{netloc}', parsed_url.netloc) \
 
               .replace('{scheme}', parsed_url.scheme) \
 
               .replace('{size}', safe_str(size))
 
    return url
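A hypothetical example of the template substitution above (setting and address invented):

# c.visual.gravatar_url = 'https://avatars.example.com/{md5email}?s={size}'
# gravatar_url('User@Example.com', size=48)
#     -> 'https://avatars.example.com/<md5 of "user@example.com">?s=48'
# {email}, {netloc} and {scheme} are likewise replaced by the address and by
# the host and scheme of the currently requested URL.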
 

	
 

	
 
@@ -910,131 +924,130 @@ def changed_tooltip(nodes):
 
        pref = ': <br/> '
 
        suf = ''
 
        if len(nodes) > 30:
 
            suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
 
        return literal(pref + '<br/> '.join([safe_unicode(x.path)
 
                                             for x in nodes[:30]]) + suf)
 
    else:
 
        return ': ' + _('No files')
 

	
 

	
 
def fancy_file_stats(stats):
 
    """
 
    Displays a fancy two-colored bar for the number of added/deleted
 
    lines of code in a file
 

	
 
    :param stats: two element list of added/deleted lines of code
 
    """
 
    from kallithea.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
 
        MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE
 

	
 
    a, d = stats['added'], stats['deleted']
 
    width = 100
 

	
 
    if stats['binary']:
 
        #binary mode
 
        # binary mode
 
        lbl = ''
 
        bin_op = 1
 

	
 
        if BIN_FILENODE in stats['ops']:
 
            lbl = 'bin+'
 

	
 
        if NEW_FILENODE in stats['ops']:
 
            lbl += _('new file')
 
            bin_op = NEW_FILENODE
 
        elif MOD_FILENODE in stats['ops']:
 
            lbl += _('mod')
 
            bin_op = MOD_FILENODE
 
        elif DEL_FILENODE in stats['ops']:
 
            lbl += _('del')
 
            bin_op = DEL_FILENODE
 
        elif RENAMED_FILENODE in stats['ops']:
 
            lbl += _('rename')
 
            bin_op = RENAMED_FILENODE
 

	
 
        #chmod can go with other operations
 
        # chmod can go with other operations
 
        if CHMOD_FILENODE in stats['ops']:
 
            _org_lbl = _('chmod')
 
            lbl += _org_lbl if lbl.endswith('+') else '+%s' % _org_lbl
 

	
 
        #import ipdb;ipdb.set_trace()
 
        b_d = '<div class="bin bin%s progress-bar" style="width:100%%">%s</div>' % (bin_op, lbl)
 
        b_a = '<div class="bin bin1" style="width:0%"></div>'
 
        return literal('<div style="width:%spx" class="progress">%s%s</div>' % (width, b_a, b_d))
 

	
 
    t = stats['added'] + stats['deleted']
 
    unit = float(width) / (t or 1)
 

	
 
    # needs > 9% of width to be visible or 0 to be hidden
 
    a_p = max(9, unit * a) if a > 0 else 0
 
    d_p = max(9, unit * d) if d > 0 else 0
 
    p_sum = a_p + d_p
 

	
 
    if p_sum > width:
 
        #adjust the percentage to be == 100% since we adjusted to 9
 
        # adjust the percentage to be == 100% since we adjusted to 9
 
        if a_p > d_p:
 
            a_p = a_p - (p_sum - width)
 
        else:
 
            d_p = d_p - (p_sum - width)
 

	
 
    a_v = a if a > 0 else ''
 
    d_v = d if d > 0 else ''
 

	
 
    d_a = '<div class="added progress-bar" style="width:%s%%">%s</div>' % (
 
        a_p, a_v
 
    )
 
    d_d = '<div class="deleted progress-bar" style="width:%s%%">%s</div>' % (
 
        d_p, d_v
 
    )
 
    return literal('<div class="pull-right progress" style="width:%spx">%s%s</div>' % (width, d_a, d_d))
 

	
 

	
 
_URLIFY_RE = re.compile(r'''
 
# URL markup
 
(?P<url>%s) |
 
# @mention markup
 
(?P<mention>%s) |
 
# Changeset hash markup
 
(?<!\w|[-_])
 
  (?P<hash>[0-9a-f]{12,40})
 
(?!\w|[-_]) |
 
# Markup of *bold text*
 
(?:
 
  (?:^|(?<=\s))
 
  (?P<bold> [*] (?!\s) [^*\n]* (?<!\s) [*] )
 
  (?![*\w])
 
) |
 
# "Stylize" markup
 
\[see\ \=&gt;\ *(?P<seen>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] |
 
\[license\ \=&gt;\ *(?P<license>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] |
 
\[(?P<tagtype>requires|recommends|conflicts|base)\ \=&gt;\ *(?P<tagvalue>[a-zA-Z0-9\-\/]*)\] |
 
\[(?:lang|language)\ \=&gt;\ *(?P<lang>[a-zA-Z\-\/\#\+]*)\] |
 
\[(?P<tag>[a-z]+)\]
 
''' % (url_re.pattern, MENTIONS_REGEX.pattern),
 
    re.VERBOSE | re.MULTILINE | re.IGNORECASE)
 

	
 

	
 

	
 
def urlify_text(s, repo_name=None, link_=None, truncate=None, stylize=False, truncatef=truncate):
 
    """
 
    Parses the given text message and makes literal HTML with markup.
 
    The text will be truncated to the specified length.
 
    Hashes are turned into changeset links to the specified repository.
 
    URLs link to what they say.
 
    Issues are linked to the given issue server.
 
    If link_ is provided, all text not already linking somewhere will link there.
 
    """
 

	
 
    def _replace(match_obj):
 
        url = match_obj.group('url')
 
        if url is not None:
 
            return '<a href="%(url)s">%(url)s</a>' % {'url': url}
 
        mention = match_obj.group('mention')
 
        if mention is not None:
 
            return '<b>%s</b>' % mention
 
        hash_ = match_obj.group('hash')
 
        if hash_ is not None and repo_name is not None:
 
            from kallithea.config.routing import url  # doh, we need to re-import url to mock it later
 
            return '<a class="changeset_hash" href="%(url)s">%(hash)s</a>' % {
 
                 'url': url('changeset_home', repo_name=repo_name, revision=hash_),
 
                 'hash': hash_,
 
                }
 
@@ -1110,108 +1123,111 @@ def urlify_issues(newtext, repo_name):
 
        from kallithea.model.db import URL_SEP
 
        assert CONFIG['sqlalchemy.url'] # make sure config has been loaded
 

	
 
        # Build chain of urlify functions, starting with not doing any transformation
 
        tmp_urlify_issues_f = lambda s: s
 

	
 
        issue_pat_re = re.compile(r'issue_pat(.*)')
 
        for k in CONFIG.keys():
 
            # Find all issue_pat* settings that also have corresponding server_link and prefix configuration
 
            m = issue_pat_re.match(k)
 
            if m is None:
 
                continue
 
            suffix = m.group(1)
 
            issue_pat = CONFIG.get(k)
 
            issue_server_link = CONFIG.get('issue_server_link%s' % suffix)
 
            issue_prefix = CONFIG.get('issue_prefix%s' % suffix)
 
            if issue_pat and issue_server_link and issue_prefix:
 
                log.debug('issue pattern %r: %r -> %r %r', suffix, issue_pat, issue_server_link, issue_prefix)
 
            else:
 
                log.error('skipping incomplete issue pattern %r: %r -> %r %r', suffix, issue_pat, issue_server_link, issue_prefix)
 
                continue
 

	
 
            # Wrap tmp_urlify_issues_f with substitution of this pattern, while making sure all loop variables (and compiled regexps) are bound
 
            issue_re = re.compile(issue_pat)
 

	
 
            def issues_replace(match_obj,
 
                               issue_server_link=issue_server_link, issue_prefix=issue_prefix):
 
                leadingspace = ' ' if match_obj.group().startswith(' ') else ''
 
                issue_id = ''.join(match_obj.groups())
 
                issue_url = issue_server_link.replace('{id}', issue_id)
 
                issue_url = issue_url.replace('{repo}', repo_name)
 
                issue_url = issue_url.replace('{repo_name}', repo_name.split(URL_SEP)[-1])
 
                return (
 
                    '%(leadingspace)s<a class="issue-tracker-link" href="%(url)s">'
 
                    '%(issue-prefix)s%(id-repr)s'
 
                    '</a>'
 
                    ) % {
 
                     'leadingspace': leadingspace,
 
                     'url': issue_url,
 
                     'id-repr': issue_id,
 
                     'issue-prefix': issue_prefix,
 
                     'serv': issue_server_link,
 
                    }
 
            tmp_urlify_issues_f = (lambda s,
 
                                          issue_re=issue_re, issues_replace=issues_replace, chain_f=tmp_urlify_issues_f:
 
                                   issue_re.sub(issues_replace, chain_f(s)))
 

	
 
        # Set tmp function globally - atomically
 
        _urlify_issues_f = tmp_urlify_issues_f
 

	
 
    return _urlify_issues_f(newtext)
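A hypothetical configuration sketch for the issue_pat machinery above (all values invented):

# in the .ini file:
#   issue_pat_bz         = #(\d+)
#   issue_server_link_bz = https://bugs.example.com/{id}
#   issue_prefix_bz      = BZ-
# with that, urlify_issues('fixes #42', 'myrepo') returns
#   'fixes <a class="issue-tracker-link" href="https://bugs.example.com/42">BZ-42</a>'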
 

	
 

	
 
def render_w_mentions(source, repo_name=None):
 
    """
 
    Render plain text with revision hashes and issue references urlified
 
    and with @mention highlighting.
 
    """
 
    s = safe_unicode(source)
 
    s = urlify_text(s, repo_name=repo_name)
 
    return literal('<div class="formatted-fixed">%s</div>' % s)
 

	
 

	
 
def short_ref(ref_type, ref_name):
 
    if ref_type == 'rev':
 
        return short_id(ref_name)
 
    return ref_name
 

	
 

	
 
def link_to_ref(repo_name, ref_type, ref_name, rev=None):
 
    """
 
    Return full markup for a href to changeset_home for a changeset.
 
    If ref_type is branch it will link to changelog.
 
    ref_name is shortened if ref_type is 'rev'.
 
    if rev is specified show it too, explicitly linking to that revision.
 
    """
 
    txt = short_ref(ref_type, ref_name)
 
    if ref_type == 'branch':
 
        u = url('changelog_home', repo_name=repo_name, branch=ref_name)
 
    else:
 
        u = url('changeset_home', repo_name=repo_name, revision=ref_name)
 
    l = link_to(repo_name + '#' + txt, u)
 
    if rev and ref_type != 'rev':
 
        l = literal('%s (%s)' % (l, link_to(short_id(rev), url('changeset_home', repo_name=repo_name, revision=rev))))
 
    return l
 

	
 

	
 
def changeset_status(repo, revision):
 
    from kallithea.model.changeset_status import ChangesetStatusModel
 
    return ChangesetStatusModel().get_status(repo, revision)
 

	
 

	
 
def changeset_status_lbl(changeset_status):
 
    from kallithea.model.db import ChangesetStatus
 
    return ChangesetStatus.get_status_lbl(changeset_status)
 

	
 

	
 
def get_permission_name(key):
 
    from kallithea.model.db import Permission
 
    return dict(Permission.PERMS).get(key)
 

	
 

	
 
def journal_filter_help():
 
    return _(textwrap.dedent('''
 
        Example filter terms:
 
            repository:vcs
 
            username:developer
 
            action:*push*
 
            ip:127.0.0.1
 
            date:20120101
 
            date:[20120101100000 TO 20120102]
kallithea/lib/hooks.py
Show inline comments
 
@@ -77,99 +77,99 @@ def repo_size(ui, repo, hooktype=None, *
 

	
 
    size_hg_f, size_root_f, size_total_f = _get_scm_size('.hg', repo.root)
 

	
 
    last_cs = repo[len(repo) - 1]
 

	
 
    msg = ('Repository size .hg: %s Checkout: %s Total: %s\n'
 
           'Last revision is now r%s:%s\n') % (
 
        size_hg_f, size_root_f, size_total_f, last_cs.rev(), last_cs.hex()[:12]
 
    )
 
    ui.status(msg)
 

	
 

	
 
def pre_push(ui, repo, **kwargs):
 
    # pre push function, currently used to ban pushing when
 
    # the repository is locked
 
    ex = _extract_extras()
 

	
 
    usr = User.get_by_username(ex.username)
 
    if ex.locked_by[0] and usr.user_id != int(ex.locked_by[0]):
 
        locked_by = User.get(ex.locked_by[0]).username
 
        # this exception is interpreted in git/hg middlewares and based
 
        # on that the proper return code is served to the client
 
        _http_ret = HTTPLockedRC(ex.repository, locked_by)
 
        if str(_http_ret.code).startswith('2'):
 
            #2xx Codes don't raise exceptions
 
            # 2xx Codes don't raise exceptions
 
            ui.status(safe_str(_http_ret.title))
 
        else:
 
            raise _http_ret
 

	
 

	
 
def pre_pull(ui, repo, **kwargs):
 
    # pre pull function ...
 
    ex = _extract_extras()
 
    if ex.locked_by[0]:
 
        locked_by = User.get(ex.locked_by[0]).username
 
        # this exception is interpreted in git/hg middlewares and based
 
        # on that the proper return code is served to the client
 
        _http_ret = HTTPLockedRC(ex.repository, locked_by)
 
        if str(_http_ret.code).startswith('2'):
 
            #2xx Codes don't raise exceptions
 
            # 2xx Codes don't raise exceptions
 
            ui.status(safe_str(_http_ret.title))
 
        else:
 
            raise _http_ret
 

	
 

	
 
def log_pull_action(ui, repo, **kwargs):
 
    """
 
    Logs user last pull action
 

	
 
    :param ui:
 
    :param repo:
 
    """
 
    ex = _extract_extras()
 

	
 
    user = User.get_by_username(ex.username)
 
    action = 'pull'
 
    action_logger(user, action, ex.repository, ex.ip, commit=True)
 
    # extension hook call
 
    from kallithea import EXTENSIONS
 
    callback = getattr(EXTENSIONS, 'PULL_HOOK', None)
 
    if callable(callback):
 
        kw = {}
 
        kw.update(ex)
 
        callback(**kw)
 

	
 
    if ex.make_lock is not None and ex.make_lock:
 
        Repository.lock(Repository.get_by_repo_name(ex.repository), user.user_id)
 
        #msg = 'Made lock on repo `%s`' % repository
 
        #ui.status(msg)
 

	
 
    if ex.locked_by[0]:
 
        locked_by = User.get(ex.locked_by[0]).username
 
        _http_ret = HTTPLockedRC(ex.repository, locked_by)
 
        if str(_http_ret.code).startswith('2'):
 
            #2xx Codes don't raise exceptions
 
            # 2xx Codes don't raise exceptions
 
            ui.status(safe_str(_http_ret.title))
 
    return 0
 

	
 

	
 
def log_push_action(ui, repo, **kwargs):
 
    """
 
    Register that changes have been pushed.
 
    Mercurial invokes this directly as a hook, git uses handle_git_receive.
 
    """
 

	
 
    ex = _extract_extras()
 

	
 
    action_tmpl = ex.action + ':%s'
 
    revs = []
 
    if ex.scm == 'hg':
 
        node = kwargs['node']
 

	
 
        def get_revs(repo, rev_opt):
 
            if rev_opt:
 
                revs = revrange(repo, rev_opt)
 

	
 
                if len(revs) == 0:
 
                    return (nullrev, nullrev)
 
                return max(revs), min(revs)
 
@@ -182,49 +182,49 @@ def log_push_action(ui, repo, **kwargs):
 
    elif ex.scm == 'git':
 
        revs = kwargs.get('_git_revs', [])
 
        if '_git_revs' in kwargs:
 
            kwargs.pop('_git_revs')
 

	
 
    action = action_tmpl % ','.join(revs)
 
    action_logger(ex.username, action, ex.repository, ex.ip, commit=True)
 

	
 
    # extension hook call
 
    from kallithea import EXTENSIONS
 
    callback = getattr(EXTENSIONS, 'PUSH_HOOK', None)
 
    if callable(callback):
 
        kw = {'pushed_revs': revs}
 
        kw.update(ex)
 
        callback(**kw)
 

	
 
    if ex.make_lock is not None and not ex.make_lock:
 
        Repository.unlock(Repository.get_by_repo_name(ex.repository))
 
        ui.status(safe_str('Released lock on repo `%s`\n' % ex.repository))
 

	
 
    if ex.locked_by[0]:
 
        locked_by = User.get(ex.locked_by[0]).username
 
        _http_ret = HTTPLockedRC(ex.repository, locked_by)
 
        if str(_http_ret.code).startswith('2'):
 
            #2xx Codes don't raise exceptions
 
            # 2xx Codes don't raise exceptions
 
            ui.status(safe_str(_http_ret.title))
 

	
 
    return 0
 

	
 

	
 
def log_create_repository(repository_dict, created_by, **kwargs):
 
    """
 
    Post create repository Hook.
 

	
 
    :param repository_dict: dict dump of repository object
 
    :param created_by: username who created repository
 

	
 
    available keys of repository_dict:
 

	
 
     'repo_type',
 
     'description',
 
     'private',
 
     'created_on',
 
     'enable_downloads',
 
     'repo_id',
 
     'owner_id',
 
     'enable_statistics',
 
     'clone_uri',
 
     'fork_id',
 
@@ -425,45 +425,45 @@ def handle_git_receive(repo_path, revs, 
 
    # if push hook is enabled via web interface
 
    elif hook_type == 'post' and _hooks.get(Ui.HOOK_PUSH):
 
        rev_data = []
 
        for l in revs:
 
            old_rev, new_rev, ref = l.strip().split(' ')
 
            _ref_data = ref.split('/')
 
            if _ref_data[1] in ['tags', 'heads']:
 
                rev_data.append({'old_rev': old_rev,
 
                                 'new_rev': new_rev,
 
                                 'ref': ref,
 
                                 'type': _ref_data[1],
 
                                 'name': '/'.join(_ref_data[2:])})
 

	
 
        git_revs = []
 

	
 
        for push_ref in rev_data:
 
            _type = push_ref['type']
 
            if _type == 'heads':
 
                if push_ref['old_rev'] == EmptyChangeset().raw_id:
 
                    # update the symbolic ref if we push new repo
 
                    if repo.is_empty():
 
                        repo._repo.refs.set_symbolic_ref('HEAD',
 
                                            'refs/heads/%s' % push_ref['name'])
 

	
 
                    cmd = ['for-each-ref', '--format=%(refname)','refs/heads/*']
 
                    cmd = ['for-each-ref', '--format=%(refname)', 'refs/heads/*']
 
                    heads = repo.run_git_command(cmd)[0]
 
                    cmd = ['log', push_ref['new_rev'],
 
                           '--reverse', '--pretty=format:%H', '--not']
 
                    heads = heads.replace(push_ref['ref'], '')
 
                    for l in heads.splitlines():
 
                        cmd.append(l.strip())
 
                    git_revs += repo.run_git_command(cmd)[0].splitlines()
 

	
 
                elif push_ref['new_rev'] == EmptyChangeset().raw_id:
 
                    #delete branch case
 
                    # delete branch case
 
                    git_revs += ['delete_branch=>%s' % push_ref['name']]
 
                else:
 
                    cmd = ['log', '%(old_rev)s..%(new_rev)s' % push_ref,
 
                           '--reverse', '--pretty=format:%H']
 
                    git_revs += repo.run_git_command(cmd)[0].splitlines()
 

	
 
            elif _type == 'tags':
 
                git_revs += ['tag=>%s' % push_ref['name']]
 

	
 
        log_push_action(baseui, repo, _git_revs=git_revs)
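
For reference, a standalone sketch of how one post-receive input line is split into the rev_data fields used above (the hashes and ref name are made up):

    line = '1111111111111111111111111111111111111111 2222222222222222222222222222222222222222 refs/heads/feature/login'
    old_rev, new_rev, ref = line.strip().split(' ')
    _ref_data = ref.split('/')                      # ['refs', 'heads', 'feature', 'login']
    push_ref = {'old_rev': old_rev,
                'new_rev': new_rev,
                'ref': ref,
                'type': _ref_data[1],               # 'heads' (or 'tags')
                'name': '/'.join(_ref_data[2:])}    # 'feature/login'
    print(push_ref['type'] + ' -> ' + push_ref['name'])
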
kallithea/lib/indexers/__init__.py
Show inline comments
 
@@ -29,76 +29,76 @@ import os
 
import sys
 
import logging
 
from os.path import dirname
 

	
 
# Add location of top level folder to sys.path
 
sys.path.append(dirname(dirname(dirname(os.path.realpath(__file__)))))
 

	
 
from whoosh.analysis import RegexTokenizer, LowercaseFilter, IDTokenizer
 
from whoosh.fields import TEXT, ID, STORED, NUMERIC, BOOLEAN, Schema, FieldType, DATETIME
 
from whoosh.formats import Characters
 
from whoosh.highlight import highlight as whoosh_highlight, HtmlFormatter, ContextFragmenter
 
from kallithea.lib.utils2 import LazyProperty
 

	
 
log = logging.getLogger(__name__)
 

	
 
# CUSTOM ANALYZER wordsplit + lowercase filter
 
ANALYZER = RegexTokenizer(expression=r"\w+") | LowercaseFilter()
 

	
 
# CUSTOM ANALYZER wordsplit + lowercase filter, for emailaddr-like text
 
#
 
# This is useful to:
 
# - avoid removing "stop words" from text
 
# - search case-insensitively
 
#
 
EMAILADDRANALYZER =  RegexTokenizer() | LowercaseFilter()
 
EMAILADDRANALYZER = RegexTokenizer() | LowercaseFilter()
 

	
 
# CUSTOM ANALYZER raw-string + lowercase filter
 
#
 
# This is useful to:
 
# - avoid tokenization
 
# - avoid removing "stop words" from text
 
# - search case-insensitively
 
#
 
ICASEIDANALYZER = IDTokenizer() | LowercaseFilter()
 

	
 
# CUSTOM ANALYZER raw-string
 
#
 
# This is useful to:
 
# - avoid tokenization
 
# - avoid removing "stop words" from text
 
#
 
IDANALYZER = IDTokenizer()
 

	
 
# CUSTOM ANALYZER wordsplit + lowercase filter, for pathname-like text
 
#
 
# This is useful to:
 
# - avoid removing "stop words" from text
 
# - search case-insensitively
 
#
 
PATHANALYZER = RegexTokenizer() | LowercaseFilter()
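
As a rough illustration of how these analyzers behave (standalone Whoosh usage, not part of the schema; the sample strings are made up):

    from whoosh.analysis import RegexTokenizer, LowercaseFilter, IDTokenizer

    wordsplit = RegexTokenizer(expression=r"\w+") | LowercaseFilter()  # like ANALYZER
    raw_lower = IDTokenizer() | LowercaseFilter()                      # like ICASEIDANALYZER

    # wordsplit breaks on non-word characters and lowercases each token
    print([t.text for t in wordsplit(u"Send mail to Dev@Example.COM")])
    # raw_lower keeps the whole string as a single lowercased token
    print([t.text for t in raw_lower(u"Dev@Example.COM")])
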
 

	
 
#INDEX SCHEMA DEFINITION
 
# INDEX SCHEMA DEFINITION
 
SCHEMA = Schema(
 
    fileid=ID(unique=True),
 
    owner=TEXT(analyzer=EMAILADDRANALYZER),
 
    # this field preserves case of repository name for exact matching
 
    repository_rawname=TEXT(analyzer=IDANALYZER),
 
    repository=TEXT(stored=True, analyzer=ICASEIDANALYZER),
 
    path=TEXT(stored=True, analyzer=PATHANALYZER),
 
    content=FieldType(format=Characters(), analyzer=ANALYZER,
 
                      scorable=True, stored=True),
 
    modtime=STORED(),
 
    extension=TEXT(stored=True, analyzer=PATHANALYZER)
 
)
 

	
 
IDX_NAME = 'HG_INDEX'
 
FORMATTER = HtmlFormatter('span', between='\n<span class="break">...</span>\n')
 
FRAGMENTER = ContextFragmenter(200)
 

	
 
CHGSETS_SCHEMA = Schema(
 
    raw_id=ID(unique=True, stored=True),
 
    date=NUMERIC(stored=True),
 
    last=BOOLEAN(),
 
    owner=TEXT(analyzer=EMAILADDRANALYZER),
 
    # this field preserves case of repository name for exact matching
 
    # and unique-ness in index table
kallithea/lib/indexers/daemon.py
Show inline comments
 
@@ -56,60 +56,60 @@ from whoosh.qparser import QueryParser
 

	
 
log = logging.getLogger('whoosh_indexer')
 

	
 

	
 
class WhooshIndexingDaemon(object):
 
    """
 
    Daemon for atomic indexing jobs
 
    """
 

	
 
    def __init__(self, indexname=IDX_NAME, index_location=None,
 
                 repo_location=None, repo_list=None,
 
                 repo_update_list=None):
 
        self.indexname = indexname
 

	
 
        self.index_location = index_location
 
        if not index_location:
 
            raise Exception('You have to provide index location')
 

	
 
        self.repo_location = repo_location
 
        if not repo_location:
 
            raise Exception('You have to provide repositories location')
 

	
 
        self.repo_paths = ScmModel().repo_scan(self.repo_location)
 

	
 
        #filter repo list
 
        # filter repo list
 
        if repo_list:
 
            #Fix non-ascii repo names to unicode
 
            # Fix non-ascii repo names to unicode
 
            repo_list = map(safe_unicode, repo_list)
 
            self.filtered_repo_paths = {}
 
            for repo_name, repo in self.repo_paths.items():
 
                if repo_name in repo_list:
 
                    self.filtered_repo_paths[repo_name] = repo
 

	
 
            self.repo_paths = self.filtered_repo_paths
 

	
 
        #filter update repo list
 
        # filter update repo list
 
        self.filtered_repo_update_paths = {}
 
        if repo_update_list:
 
            self.filtered_repo_update_paths = {}
 
            for repo_name, repo in self.repo_paths.items():
 
                if repo_name in repo_update_list:
 
                    self.filtered_repo_update_paths[repo_name] = repo
 
            self.repo_paths = self.filtered_repo_update_paths
 

	
 
        self.initial = True
 
        if not os.path.isdir(self.index_location):
 
            os.makedirs(self.index_location)
 
            log.info('Cannot run incremental index since it does not '
 
                     'yet exist - running full build')
 
        elif not exists_in(self.index_location, IDX_NAME):
 
            log.info('Running full index build as the file content '
 
                     'index does not exist')
 
        elif not exists_in(self.index_location, CHGSET_IDX_NAME):
 
            log.info('Running full index build as the changeset '
 
                     'index does not exist')
 
        else:
 
            self.initial = False
 

	
 
    def _get_index_revision(self, repo):
 
        db_repo = Repository.get_by_repo_name(repo.name_unicode)
kallithea/lib/ipaddr.py
Show inline comments
 
@@ -346,51 +346,53 @@ def collapse_address_list(addresses):
 
            ips.append(ip)
 
        elif ip._prefixlen == ip._max_prefixlen:
 
            if ips and ips[-1]._version != ip._version:
 
                raise TypeError("%s and %s are not of the same version" % (
 
                        str(ip), str(ips[-1])))
 
            ips.append(ip.ip)
 
        else:
 
            if nets and nets[-1]._version != ip._version:
 
                raise TypeError("%s and %s are not of the same version" % (
 
                        str(ip), str(nets[-1])))
 
            nets.append(ip)
 

	
 
    # sort and dedup
 
    ips = sorted(set(ips))
 
    nets = sorted(set(nets))
 

	
 
    while i < len(ips):
 
        (first, last) = _find_address_range(ips[i:])
 
        i = ips.index(last) + 1
 
        addrs.extend(summarize_address_range(first, last))
 

	
 
    return _collapse_address_list_recursive(sorted(
 
        addrs + nets, key=_BaseNet._get_networks_key))
 

	
 

	
 
# backwards compatibility
 
CollapseAddrList = collapse_address_list
 

	
 

	
 
# We need to distinguish between the string and packed-bytes representations
 
# of an IP address.  For example, b'0::1' is the IPv4 address 48.58.58.49,
 
# while '0::1' is an IPv6 address.
 
#
 
# In Python 3, the native 'bytes' type already provides this functionality,
 
# so we use it directly.  For earlier implementations where bytes is not a
 
# distinct type, we create a subclass of str to serve as a tag.
 
#
 
# Usage example (Python 2):
 
#   ip = ipaddr.IPAddress(ipaddr.Bytes('xxxx'))
 
#
 
# Usage example (Python 3):
 
#   ip = ipaddr.IPAddress(b'xxxx')
 
try:
 
    if bytes is str:
 
        raise TypeError("bytes is not a distinct type")
 
    Bytes = bytes
 
except (NameError, TypeError):
 
    class Bytes(str):
 
        def __repr__(self):
 
            return 'Bytes(%s)' % str.__repr__(self)
 

	
 

	
 
def get_mixed_type_key(obj):
kallithea/lib/markup_renderer.py
Show inline comments
 
@@ -58,48 +58,49 @@ class MarkupRenderer(object):
 
        """
 

	
 
        if MarkupRenderer.MARKDOWN_PAT.findall(filename):
 
            detected_renderer = 'markdown'
 
        elif MarkupRenderer.RST_PAT.findall(filename):
 
            detected_renderer = 'rst'
 
        elif MarkupRenderer.PLAIN_PAT.findall(filename):
 
            detected_renderer = 'rst'
 
        else:
 
            detected_renderer = 'plain'
 

	
 
        return getattr(MarkupRenderer, detected_renderer)
 

	
 
    @classmethod
 
    def _flavored_markdown(cls, text):
 
        """
 
        Github style flavored markdown
 

	
 
        :param text:
 
        """
 
        from hashlib import md5
 

	
 
        # Extract pre blocks.
 
        extractions = {}
 

	
 
        def pre_extraction_callback(matchobj):
 
            digest = md5(matchobj.group(0)).hexdigest()
 
            extractions[digest] = matchobj.group(0)
 
            return "{gfm-extraction-%s}" % digest
 
        pattern = re.compile(r'<pre>.*?</pre>', re.MULTILINE | re.DOTALL)
 
        text = re.sub(pattern, pre_extraction_callback, text)
 

	
 
        # Prevent foo_bar_baz from ending up with an italic word in the middle.
 
        def italic_callback(matchobj):
 
            s = matchobj.group(0)
 
            if list(s).count('_') >= 2:
 
                return s.replace('_', '\_')
 
            return s
 
        text = re.sub(r'^(?! {4}|\t)\w+_\w+_\w[\w_]*', italic_callback, text)
 

	
 
        # In very clear cases, let newlines become <br /> tags.
 
        def newline_callback(matchobj):
 
            if len(matchobj.group(1)) == 1:
 
                return matchobj.group(0).rstrip() + '  \n'
 
            else:
 
                return matchobj.group(0)
 
        pattern = re.compile(r'^[\w\<][^\n]*(\n+)', re.MULTILINE)
 
        text = re.sub(pattern, newline_callback, text)
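
A small self-contained sketch of the pre-block extraction step shown at the top of this hunk - stashing <pre> sections under an md5 placeholder so the later regex passes cannot alter them (the sample text is made up; restoring the placeholders happens elsewhere in the method):

    import re
    from hashlib import md5

    extractions = {}

    def pre_extraction_callback(matchobj):
        # remember the original block under a digest key and leave a placeholder behind
        digest = md5(matchobj.group(0).encode('utf-8')).hexdigest()
        extractions[digest] = matchobj.group(0)
        return "{gfm-extraction-%s}" % digest

    text = "before <pre>keep _this_ verbatim</pre> after"
    text = re.sub(r'<pre>.*?</pre>', pre_extraction_callback, text, flags=re.DOTALL)
    print(text)  # the <pre> block is now a {gfm-extraction-...} placeholder
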
 

	
kallithea/lib/middleware/pygrack.py
Show inline comments
 
@@ -51,49 +51,49 @@ class FileWrapper(object):
 
            try:
 
                data = self.fd.read(size)
 
            except socket.error:
 
                raise IOError(self)
 
            self.remain -= size
 
        elif self.remain:
 
            data = self.fd.read(self.remain)
 
            self.remain = 0
 
        else:
 
            data = None
 
        return data
 

	
 
    def __repr__(self):
 
        return '<FileWrapper %s len: %s, read: %s>' % (
 
            self.fd, self.content_length, self.content_length - self.remain
 
        )
 

	
 

	
 
class GitRepository(object):
 
    git_folder_signature = set(['config', 'head', 'info', 'objects', 'refs'])
 
    commands = ['git-upload-pack', 'git-receive-pack']
 

	
 
    def __init__(self, repo_name, content_path, extras):
 
        files = set([f.lower() for f in os.listdir(content_path)])
 
        if  not (self.git_folder_signature.intersection(files)
 
        if not (self.git_folder_signature.intersection(files)
 
                == self.git_folder_signature):
 
            raise OSError('%s missing git signature' % content_path)
 
        self.content_path = content_path
 
        self.valid_accepts = ['application/x-%s-result' %
 
                              c for c in self.commands]
 
        self.repo_name = repo_name
 
        self.extras = extras
 

	
 
    def _get_fixedpath(self, path):
 
        """
 
        Small fix for repo_path
 

	
 
        :param path:
 
        """
 
        path = safe_unicode(path)
 
        assert path.startswith('/' + self.repo_name + '/')
 
        return path[len(self.repo_name) + 2:].strip('/')
 

	
 
    def inforefs(self, req, environ):
 
        """
 
        WSGI Response producer for HTTP GET Git Smart
 
        HTTP /info/refs request.
 
        """
 

	
kallithea/lib/middleware/sessionmiddleware.py
Show inline comments
 
@@ -5,48 +5,49 @@
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.middleware.sessionmiddleware
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
session management middleware
 

	
 
This file overrides Beaker's built-in SessionMiddleware
 
class to automagically use secure cookies over HTTPS.
 

	
 
Original Beaker SessionMiddleware class written by Ben Bangert
 
"""
 

	
 
from beaker.session import SessionObject
 
from beaker.middleware import SessionMiddleware
 

	
 

	
 
class SecureSessionMiddleware(SessionMiddleware):
 
    def __call__(self, environ, start_response):
 
        """
 
        This function's implementation is taken directly from Beaker,
 
        with HTTPS detection added. When accessed over HTTPS, force
 
        setting cookie's secure flag.
 

	
 
        The only difference from that original code is that we switch
 
        the secure option on and off depending on the URL scheme (first
 
        two lines). To avoid concurrency issues, we use a local options
 
        variable.
 
        """
 
        options = dict(self.options)
 
        options["secure"] = environ['wsgi.url_scheme'] == 'https'
 

	
 
        session = SessionObject(environ, **options)
 
        if environ.get('paste.registry'):
 
            if environ['paste.registry'].reglist:
 
                environ['paste.registry'].register(self.session, session)
 
        environ[self.environ_key] = session
 
        environ['beaker.get_session'] = self._get_session
 

	
 
        if 'paste.testing_variables' in environ and 'webtest_varname' in options:
 
            environ['paste.testing_variables'][options['webtest_varname']] = session
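
The core idea can be shown without Beaker at all - copy the options per request and derive the secure flag from the WSGI URL scheme (sketch only; make_cookie_options is a made-up helper name):

    def make_cookie_options(environ, base_options):
        # a per-request copy avoids mutating shared state under concurrency
        options = dict(base_options)
        options['secure'] = environ.get('wsgi.url_scheme') == 'https'
        return options

    print(make_cookie_options({'wsgi.url_scheme': 'https'}, {'httponly': True}))
    print(make_cookie_options({'wsgi.url_scheme': 'http'}, {'httponly': True}))
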
kallithea/lib/middleware/simplehg.py
Show inline comments
 
@@ -41,48 +41,49 @@ from kallithea.lib.base import BaseVCSCo
 
from kallithea.lib.utils import make_ui, is_valid_repo, ui_sections
 
from kallithea.lib.vcs.utils.hgcompat import RepoError, hgweb_mod
 
from kallithea.lib.exceptions import HTTPLockedRC
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def is_mercurial(environ):
 
    """
 
    Returns True if the request's target is a mercurial server - header
 
    ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``.
 
    """
 
    http_accept = environ.get('HTTP_ACCEPT')
 
    path_info = environ['PATH_INFO']
 
    if http_accept and http_accept.startswith('application/mercurial'):
 
        ishg_path = True
 
    else:
 
        ishg_path = False
 

	
 
    log.debug('pathinfo: %s detected as Mercurial %s',
 
        path_info, ishg_path
 
    )
 
    return ishg_path
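
The detection rule boils down to a header prefix check; a minimal sketch against a bare WSGI environ dict (looks_like_hg_client is a made-up name):

    def looks_like_hg_client(environ):
        # Mercurial clients send Accept: application/mercurial-0.1 (or newer)
        http_accept = environ.get('HTTP_ACCEPT') or ''
        return http_accept.startswith('application/mercurial')

    print(looks_like_hg_client({'HTTP_ACCEPT': 'application/mercurial-0.1'}))  # True
    print(looks_like_hg_client({'HTTP_ACCEPT': 'text/html'}))                  # False
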
 

	
 

	
 
class SimpleHg(BaseVCSController):
 

	
 
    def _handle_request(self, environ, start_response):
 
        if not is_mercurial(environ):
 
            return self.application(environ, start_response)
 

	
 
        ip_addr = self._get_ip_addr(environ)
 
        # skip passing error to error controller
 
        environ['pylons.status_code_redirect'] = True
 

	
 
        #======================================================================
 
        # EXTRACT REPOSITORY NAME FROM ENV
 
        #======================================================================
 
        try:
 
            str_repo_name = environ['REPO_NAME'] = self.__get_repository(environ)
 
            assert isinstance(str_repo_name, str), str_repo_name
 
            repo_name = safe_unicode(str_repo_name)
 
            assert safe_str(repo_name) == str_repo_name, (str_repo_name, repo_name)
 
            log.debug('Extracted repo name is %s', repo_name)
 
        except Exception as e:
 
            log.error('error extracting repo_name: %r', e)
 
            return HTTPInternalServerError()(environ, start_response)
 

	
 
        # quick check if that dir exists...
 
@@ -208,29 +209,29 @@ class SimpleHg(BaseVCSController):
 
                   'pushkey': 'push'}
 
        for qry in environ['QUERY_STRING'].split('&'):
 
            if qry.startswith('cmd'):
 
                cmd = qry.split('=')[-1]
 
                return mapping.get(cmd, 'pull')
 

	
 
        # Note: the client doesn't get the helpful error message
 
        raise HTTPBadRequest('Unable to detect pull/push action! Are you using non standard command or client?')
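
For illustration, the query-string lookup above reduces to a few lines (abbreviated mapping and a made-up query string):

    mapping = {'unbundle': 'push', 'pushkey': 'push'}  # everything else counts as 'pull'
    query_string = 'cmd=unbundle&heads=abc123'

    action = None
    for qry in query_string.split('&'):
        if qry.startswith('cmd'):
            action = mapping.get(qry.split('=')[-1], 'pull')
    print(action)  # push
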
 

	
 
    def __inject_extras(self, repo_path, baseui, extras=None):
 
        """
 
        Injects some extra params into baseui instance
 

	
 
        also overwrites global settings with those taken from the local hgrc file
 

	
 
        :param baseui: baseui instance
 
        :param extras: dict with extra params to put into baseui
 
        """
 

	
 
        hgrc = os.path.join(repo_path, '.hg', 'hgrc')
 

	
 
        repoui = make_ui('file', hgrc)
 

	
 
        if repoui:
 
            #overwrite our ui instance with the section from hgrc file
 
            # overwrite our ui instance with the section from hgrc file
 
            for section in ui_sections:
 
                for k, v in repoui.configitems(section):
 
                    baseui.setconfig(section, k, v)
 
        _set_extras(extras or {})
kallithea/lib/page.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
Custom paging classes
 
"""
 

	
 
import math
 
import re
 
from kallithea.config.routing import url
 
from webhelpers.html import literal, HTML
 
from webhelpers.paginate import Page as _Page
 

	
 

	
 
class Page(_Page):
 
    """
 
    Custom pager to match rendering style with YUI paginator emitting Bootstrap paginators
 
    """
 

	
 
    def __init__(self, *args, **kwargs):
 
        kwargs.setdefault('url', url.current)
 
        _Page.__init__(self, *args, **kwargs)
 

	
 
    def _get_pos(self, cur_page, max_page, items):
 
        edge = (items / 2) + 1
 
        if (cur_page <= edge):
 
            radius = max(items / 2, items - cur_page)
 
        elif (max_page - cur_page) < edge:
 
            radius = (items - 1) - (max_page - cur_page)
 
        else:
 
            radius = items / 2
 

	
 
        left = max(1, (cur_page - (radius)))
 
        right = min(max_page, cur_page + (radius))
 
        return left, cur_page, right
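
The window arithmetic can be checked in isolation; a sketch using // for the integer division that Python 2's / performs on ints (window is a made-up name):

    def window(cur_page, max_page, items):
        edge = (items // 2) + 1
        if cur_page <= edge:
            radius = max(items // 2, items - cur_page)
        elif (max_page - cur_page) < edge:
            radius = (items - 1) - (max_page - cur_page)
        else:
            radius = items // 2
        return max(1, cur_page - radius), cur_page, min(max_page, cur_page + radius)

    print(window(1, 20, 9))   # (1, 1, 9)
    print(window(10, 20, 9))  # (6, 10, 14)
    print(window(20, 20, 9))  # (12, 20, 20)
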
 

	
 
    def _range(self, regexp_match):
 
        """
kallithea/lib/paster_commands/cleanup.py
Show inline comments
 
@@ -63,49 +63,49 @@ class Command(BasePasterCommand):
 
        """
 
        date_part = name[4:19]  # 4:19 since we don't parse milliseconds
 
        return datetime.datetime.strptime(date_part, '%Y%m%d_%H%M%S')
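
A quick sketch of the slicing above with a made-up removed-repo directory name (the real names are matched by REMOVED_REPO_PAT and carry a microseconds suffix that is deliberately skipped):

    import datetime

    name = 'rm__20170825_143250_123456__myrepo'
    date_part = name[4:19]               # '20170825_143250' - microseconds are not parsed
    print(datetime.datetime.strptime(date_part, '%Y%m%d_%H%M%S'))  # 2017-08-25 14:32:50
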
 

	
 
    def take_action(self, args):
 
        repos_location = Ui.get_repos_location()
 
        to_remove = []
 
        for dn_, dirs, f in os.walk(safe_str(repos_location)):
 
            alldirs = list(dirs)
 
            del dirs[:]
 
            if ('.hg' in alldirs or
 
                '.git' in alldirs or
 
                '.svn' in alldirs or
 
                'objects' in alldirs and ('refs' in alldirs or 'packed-refs' in f)):
 
                continue
 
            for loc in alldirs:
 
                if REMOVED_REPO_PAT.match(loc):
 
                    to_remove.append([os.path.join(dn_, loc),
 
                                      self._extract_date(loc)])
 
                else:
 
                    dirs.append(loc)
 
            if dirs:
 
                print 'Scanning: %s' % dn_
 

	
 
        #filter older than (if present)!
 
        # filter older than (if present)!
 
        now = datetime.datetime.now()
 
        older_than = args.older_than
 
        if older_than:
 
            to_remove_filtered = []
 
            older_than_date = self._parse_older_than(older_than)
 
            for name, date_ in to_remove:
 
                repo_age = now - date_
 
                if repo_age > older_than_date:
 
                    to_remove_filtered.append([name, date_])
 

	
 
            to_remove = to_remove_filtered
 
            print 'Removing %s deleted repos older than %s (%s)' \
 
                % (len(to_remove), older_than, older_than_date)
 
        else:
 
            print 'Removing all %s deleted repos' % len(to_remove)
 
        if args.dont_ask or not to_remove:
 
            # don't ask just remove !
 
            remove = True
 
        else:
 
            remove = ask_ok('the following repositories will be deleted completely:\n%s\n'
 
                            'are you sure you want to remove them [y/n]?'
 
                            % '\n'.join(['%s removed on %s' % (safe_str(x[0]), safe_str(x[1]))
 
                                         for x in to_remove]))
 

	

Changeset was too big and was cut off...