Changeset - cc1ab5ef6686
[Not reviewed]
default
Mads Kiilerich - 2015-01-06 00:54:36
madski@unity3d.com
cleanup: avoid some 'except Exception' catching - catch specific exceptions or log it and show what happened

This has a risk of introducing regressions ... but we want to get rid of all
exception muting and make the whole system less fragile and easier to debug.
13 files changed:
0 comments (0 inline, 0 general)
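
The pattern this cleanup moves toward, sketched below with a hypothetical parse_port helper and a hypothetical run_hook wrapper (neither is taken from the files in this changeset): catch only the exception the code can actually handle, and where a broad catch is unavoidable, log the traceback before re-raising so nothing is silently muted.

import logging
import traceback

log = logging.getLogger(__name__)

def parse_port(value, default=5000):
    # only ValueError is expected from int(), so that is all we catch;
    # anything else propagates and surfaces as a real bug
    try:
        return int(value)
    except ValueError:
        log.error('invalid port %r, falling back to %s', value, default)
        return default

def run_hook(hook):
    # when a broad catch is genuinely needed, log what happened
    # instead of muting the exception
    try:
        hook()
    except Exception:
        log.error(traceback.format_exc())
        raise
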
kallithea/bin/kallithea_api.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.bin.api
 
~~~~~~~~~~~~~~~~~
 

	
 
Api CLI client for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jun 3, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 
import sys
 
import argparse
 

	
 
from kallithea.bin.base import json, api_call, RcConf, FORMAT_JSON, FORMAT_PRETTY
 

	
 

	
 
def argparser(argv):
 
    usage = (
 
      "kallithea-api [-h] [--format=FORMAT] [--apikey=APIKEY] [--apihost=APIHOST] "
 
      "[--config=CONFIG] [--save-config] "
 
      "METHOD <key:val> <key2:val> ...\n"
 
      "Create config file: kallithea-api --apikey=<key> --apihost=http://your.kallithea.server --save-config"
 
    )
 

	
 
    parser = argparse.ArgumentParser(description='Kallithea API cli',
 
                                     usage=usage)
 

	
 
    ## config
 
    group = parser.add_argument_group('config')
 
    group.add_argument('--apikey', help='api access key')
 
    group.add_argument('--apihost', help='api host')
 
    group.add_argument('--config', help='config file')
 
    group.add_argument('--save-config', action='store_true', help='save the given config into a file')
 

	
 
    group = parser.add_argument_group('API')
 
    group.add_argument('method', metavar='METHOD', nargs='?', type=str, default=None,
 
            help='API method name to call followed by key:value attributes',
 
    )
 
    group.add_argument('--format', dest='format', type=str,
 
            help='output format default: `%s` can '
 
                 'be also `%s`' % (FORMAT_PRETTY, FORMAT_JSON),
 
            default=FORMAT_PRETTY
 
    )
 
    args, other = parser.parse_known_args()
 
    return parser, args, other
 

	
 

	
 
def main(argv=None):
 
    """
 
    Main execution function for cli
 

	
 
    :param argv:
 
    """
 
    if argv is None:
 
        argv = sys.argv
 

	
 
    conf = None
 
    parser, args, other = argparser(argv)
 

	
 
    api_credentials_given = (args.apikey and args.apihost)
 
    if args.save_config:
 
        if not api_credentials_given:
 
            raise parser.error('--save-config requires --apikey and --apihost')
 
        conf = RcConf(config_location=args.config,
 
                      autocreate=True, config={'apikey': args.apikey,
 
                                               'apihost': args.apihost})
 
        sys.exit()
 

	
 
    if not conf:
 
        conf = RcConf(config_location=args.config, autoload=True)
 
        if not conf:
 
            if not api_credentials_given:
 
                parser.error('Could not find config file and missing '
 
                             '--apikey or --apihost in params')
 

	
 
    apikey = args.apikey or conf['apikey']
 
    apihost = args.apihost or conf['apihost']
 
    method = args.method
 

	
 
    # if we don't have method here it's an error
 
    if not method:
 
        parser.error('Please specify method name')
 

	
 
    try:
 
        margs = dict(map(lambda s: s.split(':', 1), other))
 
-    except Exception:

+    except ValueError:
 
        sys.stderr.write('Error parsing arguments \n')
 
        sys.exit()
 
    if args.format == FORMAT_PRETTY:
 
        print 'Calling method %s => %s' % (method, apihost)
 

	
 
    json_resp = api_call(apikey, apihost, method, **margs)
 
    error_prefix = ''
 
    if json_resp['error']:
 
        error_prefix = 'ERROR:'
 
        json_data = json_resp['error']
 
    else:
 
        json_data = json_resp['result']
 
    if args.format == FORMAT_JSON:
 
        print json.dumps(json_data)
 
    elif args.format == FORMAT_PRETTY:
 
        print 'Server response \n%s%s' % (
 
            error_prefix, json.dumps(json_data, indent=4, sort_keys=True)
 
        )
 
    return 0
 

	
 
if __name__ == '__main__':
 
    sys.exit(main(sys.argv))
kallithea/bin/kallithea_config.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.bin.kallithea_config
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
configuration generator for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jun 18, 2013
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 

	
 
from __future__ import with_statement
 
import os
 
import sys
 
import uuid
 
import argparse
 
from mako.template import Template
 
TMPL = 'template.ini.mako'
 
here = os.path.dirname(os.path.abspath(__file__))
 

	
 
def argparser(argv):
 
    usage = (
 
      "kallithea-config [-h] [--filename=FILENAME] [--template=TEMPLATE] \n"
 
      "VARS optional specify extra template variable that will be available in "
 
      "template. Use comma separated key=val format eg.\n"
 
      "key1=val1,port=5000,host=127.0.0.1,elements='a\,b\,c'\n"
 
    )
 

	
 
    parser = argparse.ArgumentParser(
 
        description='Kallithea CONFIG generator with variable replacement',
 
        usage=usage
 
    )
 

	
 
    ## config
 
    group = parser.add_argument_group('CONFIG')
 
    group.add_argument('--filename', help='Output ini filename.')
 
    group.add_argument('--template', help='Mako template file to use instead of '
 
                                          'the default builtin template')
 
    group.add_argument('--raw', help='Store given mako template as raw without '
 
                                     'parsing. Use this to create custom template '
 
                                     'initially', action='store_true')
 
    group.add_argument('--show-defaults', help='Show all default variables for '
 
                                               'builtin template', action='store_true')
 
    args, other = parser.parse_known_args()
 
    return parser, args, other
 

	
 

	
 
def _escape_split(text, sep):
 
    """
 
    Allows for escaping of the separator: e.g. arg='foo\, bar'
 

	
 
    It should be noted that the way bash et. al. do command line parsing, those
 
    single quotes are required. a shameless ripoff from fabric project.
 

	
 
    """
 
    escaped_sep = r'\%s' % sep
 

	
 
    if escaped_sep not in text:
 
        return text.split(sep)
 

	
 
    before, _, after = text.partition(escaped_sep)
 
    startlist = before.split(sep)  # a regular split is fine here
 
    unfinished = startlist[-1]
 
    startlist = startlist[:-1]
 

	
 
    # recurse because there may be more escaped separators
 
    endlist = _escape_split(after, sep)
 

	
 
    # finish building the escaped value. we use endlist[0] becaue the first
 
    # part of the string sent in recursion is the rest of the escaped value.
 
    unfinished += sep + endlist[0]
 

	
 
    return startlist + [unfinished] + endlist[1:]  # put together all the parts
 

	
 
def _run(argv):
 
    parser, args, other = argparser(argv)
 
    if not len(sys.argv) > 1:
 
        print parser.print_help()
 
        sys.exit(0)
 
    # defaults that can be overwritten by arguments
 
    tmpl_stored_args = {
 
        'http_server': 'waitress',
 
        'lang': 'en',
 
        'database_engine': 'sqlite',
 
        'host': '127.0.0.1',
 
        'port': 5000,
 
        'error_aggregation_service': None,
 
    }
 
    if other:
 
        # parse arguments, we assume only first is correct
 
        kwargs = {}
 
        for el in _escape_split(other[0], ','):
 
            kv = _escape_split(el, '=')
 
            if len(kv) == 2:
 
                k, v = kv
 
                kwargs[k] = v
 
        # update our template stored args
 
        tmpl_stored_args.update(kwargs)
 

	
 
    # use default that cannot be replaced
 
    tmpl_stored_args.update({
 
        'uuid': lambda: uuid.uuid4().hex,
 
        'here': os.path.abspath(os.curdir),
 
    })
 
    if args.show_defaults:
 
        for k,v in tmpl_stored_args.iteritems():
 
            print '%s=%s' % (k, v)
 
        sys.exit(0)
 
    try:
 
        # built in template
 
        tmpl_file = os.path.join(here, TMPL)
 
        if args.template:
 
            tmpl_file = args.template
 

	
 
        with open(tmpl_file, 'rb') as f:
 
            tmpl_data = f.read()
 
            if args.raw:
 
                tmpl = tmpl_data
 
            else:
 
                tmpl = Template(tmpl_data).render(**tmpl_stored_args)
 
        with open(args.filename, 'wb') as f:
 
            f.write(tmpl)
 
        print 'Wrote new config file in %s' % (os.path.abspath(args.filename))
 

	
 
    except Exception:
 
        from mako import exceptions
 
        print exceptions.text_error_template().render()
 

	
 
def main(argv=None):
 
    """
 
    Main execution function for cli
 

	
 
    :param argv:
 
    """
 
    if argv is None:
 
        argv = sys.argv
 

	
 
-    try:

-        return _run(argv)

-    except Exception:

-        raise

+    return _run(argv)
 

	
 

	
 
if __name__ == '__main__':
 
    sys.exit(main(sys.argv))
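
A quick sketch (assuming the _escape_split defined above) of what the helper returns for the comma-separated VARS syntax shown in the usage text, where '\,' keeps a comma inside a value:

print _escape_split(r'key1=val1,elements=a\,b\,c', ',')
# ['key1=val1', 'elements=a,b,c']
print _escape_split('host=127.0.0.1,port=5000', ',')
# ['host=127.0.0.1', 'port=5000']
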
kallithea/controllers/admin/admin.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.admin.admin
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Controller for Admin panel of Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 7, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 

	
 
import logging
 

	
 
from pylons import request, tmpl_context as c, url
 
from sqlalchemy.orm import joinedload
 
from whoosh.qparser.default import QueryParser
 
from whoosh.qparser.dateparse import DateParserPlugin
 
from whoosh import query
 
from sqlalchemy.sql.expression import or_, and_, func
 

	
 
from kallithea.model.db import UserLog
 
from kallithea.lib.auth import LoginRequired, HasPermissionAllDecorator
 
from kallithea.lib.base import BaseController, render
 
from kallithea.lib.utils2 import safe_int, remove_prefix, remove_suffix
 
from kallithea.lib.indexers import JOURNAL_SCHEMA
 
from kallithea.lib.helpers import Page
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def _journal_filter(user_log, search_term):
 
    """
 
    Filters sqlalchemy user_log based on search_term with whoosh Query language
 
    http://packages.python.org/Whoosh/querylang.html
 

	
 
    :param user_log:
 
    :param search_term:
 
    """
 
    log.debug('Initial search term: %r' % search_term)
 
    qry = None
 
    if search_term:
 
        qp = QueryParser('repository', schema=JOURNAL_SCHEMA)
 
        qp.add_plugin(DateParserPlugin())
 
        qry = qp.parse(unicode(search_term))
 
        log.debug('Filtering using parsed query %r' % qry)
 

	
 
    def wildcard_handler(col, wc_term):
 
        if wc_term.startswith('*') and not wc_term.endswith('*'):
 
            #postfix == endswith
 
            wc_term = remove_prefix(wc_term, prefix='*')
 
            return func.lower(col).endswith(wc_term)
 
        elif wc_term.startswith('*') and wc_term.endswith('*'):
 
            #wildcard == ilike
 
            wc_term = remove_prefix(wc_term, prefix='*')
 
            wc_term = remove_suffix(wc_term, suffix='*')
 
            return func.lower(col).contains(wc_term)
 

	
 
    def get_filterion(field, val, term):
 

	
 
        if field == 'repository':
 
            field = getattr(UserLog, 'repository_name')
 
        elif field == 'ip':
 
            field = getattr(UserLog, 'user_ip')
 
        elif field == 'date':
 
            field = getattr(UserLog, 'action_date')
 
        elif field == 'username':
 
            field = getattr(UserLog, 'username')
 
        else:
 
            field = getattr(UserLog, field)
 
        log.debug('filter field: %s val=>%s' % (field, val))
 

	
 
        #sql filtering
 
        if isinstance(term, query.Wildcard):
 
            return wildcard_handler(field, val)
 
        elif isinstance(term, query.Prefix):
 
            return func.lower(field).startswith(func.lower(val))
 
        elif isinstance(term, query.DateRange):
 
            return and_(field >= val[0], field <= val[1])
 
        return func.lower(field) == func.lower(val)
 

	
 
    if isinstance(qry, (query.And, query.Term, query.Prefix, query.Wildcard,
 
                        query.DateRange)):
 
        if not isinstance(qry, query.And):
 
            qry = [qry]
 
        for term in qry:
 
            field = term.fieldname
 
            val = (term.text if not isinstance(term, query.DateRange)
 
                   else [term.startdate, term.enddate])
 
            user_log = user_log.filter(get_filterion(field, val, term))
 
    elif isinstance(qry, query.Or):
 
        filters = []
 
        for term in qry:
 
            field = term.fieldname
 
            val = (term.text if not isinstance(term, query.DateRange)
 
                   else [term.startdate, term.enddate])
 
            filters.append(get_filterion(field, val, term))
 
        user_log = user_log.filter(or_(*filters))
 

	
 
    return user_log
 

	
 

	
 
class AdminController(BaseController):
 

	
 
    @LoginRequired()
 
    def __before__(self):
 
        super(AdminController, self).__before__()
 

	
 
    @HasPermissionAllDecorator('hg.admin')
 
    def index(self):
 
        users_log = UserLog.query()\
 
                .options(joinedload(UserLog.user))\
 
                .options(joinedload(UserLog.repository))
 

	
 
        #FILTERING
 
        c.search_term = request.GET.get('filter')
 
-        try:

-            users_log = _journal_filter(users_log, c.search_term)

-        except Exception:

-            # we want this to crash for now

-            raise

+        users_log = _journal_filter(users_log, c.search_term)
 

	
 
        users_log = users_log.order_by(UserLog.action_date.desc())
 

	
 
        p = safe_int(request.GET.get('page', 1), 1)
 

	
 
        def url_generator(**kw):
 
            return url.current(filter=c.search_term, **kw)
 

	
 
        c.users_log = Page(users_log, page=p, items_per_page=10, url=url_generator)
 
        c.log_data = render('admin/admin_log.html')
 

	
 
        if request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            return c.log_data
 
        return render('admin/admin.html')
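
As a rough illustration (a sketch, not part of this changeset) of the SQLAlchemy criterion _journal_filter ends up building when the search term is a whoosh wildcard such as repository:*docs*:

from sqlalchemy.sql.expression import func
from kallithea.model.db import UserLog

# '*docs*' starts and ends with '*', so wildcard_handler maps it to a
# case-insensitive "contains" filter on the repository name column
criterion = func.lower(UserLog.repository_name).contains('docs')
users_log = UserLog.query().filter(criterion)
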
kallithea/controllers/changeset.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.changeset
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
changeset controller for pylons showing changes between
 
revisions
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 25, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 
import traceback
 
from collections import defaultdict
 
from webob.exc import HTTPForbidden, HTTPBadRequest, HTTPNotFound
 

	
 
from pylons import tmpl_context as c, request, response
 
from pylons.i18n.translation import _
 
from pylons.controllers.util import redirect
 
from kallithea.lib.utils import jsonify
 

	
 
from kallithea.lib.vcs.exceptions import RepositoryError, \
 
    ChangesetDoesNotExistError
 

	
 
from kallithea.lib.compat import json
 
import kallithea.lib.helpers as h
 
from kallithea.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator,\
 
    NotAnonymous
 
from kallithea.lib.base import BaseRepoController, render
 
from kallithea.lib.utils import action_logger
 
from kallithea.lib.compat import OrderedDict
 
from kallithea.lib import diffs
 
from kallithea.model.db import ChangesetComment, ChangesetStatus
 
from kallithea.model.comment import ChangesetCommentsModel
 
from kallithea.model.changeset_status import ChangesetStatusModel
 
from kallithea.model.meta import Session
 
from kallithea.model.repo import RepoModel
 
from kallithea.lib.diffs import LimitedDiffContainer
 
from kallithea.lib.exceptions import StatusChangeOnClosedPullRequestError
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 
from kallithea.lib.utils2 import safe_unicode
 
from kallithea.lib.graphmod import graph_data
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def _update_with_GET(params, GET):
 
    for k in ['diff1', 'diff2', 'diff']:
 
        params[k] += GET.getall(k)
 

	
 

	
 
def anchor_url(revision, path, GET):
 
    fid = h.FID(revision, path)
 
    return h.url.current(anchor=fid, **dict(GET))
 

	
 

	
 
def get_ignore_ws(fid, GET):
 
    ig_ws_global = GET.get('ignorews')
 
    ig_ws = filter(lambda k: k.startswith('WS'), GET.getall(fid))
 
    if ig_ws:
 
-        try:

-            return int(ig_ws[0].split(':')[-1])

-        except Exception:

-            pass

+        return int(ig_ws[0].split(':')[-1])
 
    return ig_ws_global
 

	
 

	
 
def _ignorews_url(GET, fileid=None):
 
    fileid = str(fileid) if fileid else None
 
    params = defaultdict(list)
 
    _update_with_GET(params, GET)
 
    lbl = _('Show whitespace')
 
    ig_ws = get_ignore_ws(fileid, GET)
 
    ln_ctx = get_line_ctx(fileid, GET)
 
    # global option
 
    if fileid is None:
 
        if ig_ws is None:
 
            params['ignorews'] += [1]
 
            lbl = _('Ignore whitespace')
 
        ctx_key = 'context'
 
        ctx_val = ln_ctx
 
    # per file options
 
    else:
 
        if ig_ws is None:
 
            params[fileid] += ['WS:1']
 
            lbl = _('Ignore whitespace')
 

	
 
        ctx_key = fileid
 
        ctx_val = 'C:%s' % ln_ctx
 
    # if we have passed in ln_ctx pass it along to our params
 
    if ln_ctx:
 
        params[ctx_key] += [ctx_val]
 

	
 
    params['anchor'] = fileid
 
    icon = h.literal('<i class="icon-strike"></i>')
 
    return h.link_to(icon, h.url.current(**params), title=lbl, class_='tooltip')
 

	
 

	
 
def get_line_ctx(fid, GET):
 
    ln_ctx_global = GET.get('context')
 
    if fid:
 
        ln_ctx = filter(lambda k: k.startswith('C'), GET.getall(fid))
 
    else:
 
        _ln_ctx = filter(lambda k: k.startswith('C'), GET)
 
        ln_ctx = GET.get(_ln_ctx[0]) if _ln_ctx  else ln_ctx_global
 
        if ln_ctx:
 
            ln_ctx = [ln_ctx]
 

	
 
    if ln_ctx:
 
        retval = ln_ctx[0].split(':')[-1]
 
    else:
 
        retval = ln_ctx_global
 

	
 
    try:
 
        return int(retval)
 
    except Exception:
 
        return 3
 

	
 

	
 
def _context_url(GET, fileid=None):
 
    """
 
    Generates url for context lines
 

	
 
    :param fileid:
 
    """
 

	
 
    fileid = str(fileid) if fileid else None
 
    ig_ws = get_ignore_ws(fileid, GET)
 
    ln_ctx = (get_line_ctx(fileid, GET) or 3) * 2
 

	
 
    params = defaultdict(list)
 
    _update_with_GET(params, GET)
 

	
 
    # global option
 
    if fileid is None:
 
        if ln_ctx > 0:
 
            params['context'] += [ln_ctx]
 

	
 
        if ig_ws:
 
            ig_ws_key = 'ignorews'
 
            ig_ws_val = 1
 

	
 
    # per file option
 
    else:
 
        params[fileid] += ['C:%s' % ln_ctx]
 
        ig_ws_key = fileid
 
        ig_ws_val = 'WS:%s' % 1
 

	
 
    if ig_ws:
 
        params[ig_ws_key] += [ig_ws_val]
 

	
 
    lbl = _('increase diff context to %(num)s lines') % {'num': ln_ctx}
 

	
 
    params['anchor'] = fileid
 
    icon = h.literal('<i class="icon-sort"></i>')
 
    return h.link_to(icon, h.url.current(**params), title=lbl, class_='tooltip')
 

	
 

	
 
class ChangesetController(BaseRepoController):
 

	
 
    def __before__(self):
 
        super(ChangesetController, self).__before__()
 
        c.affected_files_cut_off = 60
 

	
 
    def __load_data(self):
 
        repo_model = RepoModel()
 
        c.users_array = repo_model.get_users_js()
 
        c.user_groups_array = repo_model.get_user_groups_js()
 

	
 
    def _index(self, revision, method):
 
        c.anchor_url = anchor_url
 
        c.ignorews_url = _ignorews_url
 
        c.context_url = _context_url
 
        c.fulldiff = fulldiff = request.GET.get('fulldiff')
 
        #get ranges of revisions if preset
 
        rev_range = revision.split('...')[:2]
 
        enable_comments = True
 
        c.cs_repo = c.db_repo
 
        try:
 
            if len(rev_range) == 2:
 
                enable_comments = False
 
                rev_start = rev_range[0]
 
                rev_end = rev_range[1]
 
                rev_ranges = c.db_repo_scm_instance.get_changesets(start=rev_start,
 
                                                             end=rev_end)
 
            else:
 
                rev_ranges = [c.db_repo_scm_instance.get_changeset(revision)]
 

	
 
            c.cs_ranges = list(rev_ranges)
 
            if not c.cs_ranges:
 
                raise RepositoryError('Changeset range returned empty result')
 

	
 
        except(ChangesetDoesNotExistError,), e:
 
            log.error(traceback.format_exc())
 
            msg = _('Such revision does not exist for this repository')
 
            h.flash(msg, category='error')
 
            raise HTTPNotFound()
 

	
 
        c.changes = OrderedDict()
 

	
 
        c.lines_added = 0  # count of lines added
 
        c.lines_deleted = 0  # count of lines removes
 

	
 
        c.changeset_statuses = ChangesetStatus.STATUSES
 
        comments = dict()
 
        c.statuses = []
 
        c.inline_comments = []
 
        c.inline_cnt = 0
 

	
 
        # Iterate over ranges (default changeset view is always one changeset)
 
        for changeset in c.cs_ranges:
 
            inlines = []
 
            if method == 'show':
 
                c.statuses.extend([ChangesetStatusModel().get_status(
 
                            c.db_repo.repo_id, changeset.raw_id)])
 

	
 
                # Changeset comments
 
                comments.update((com.comment_id, com)
 
                                for com in ChangesetCommentsModel()
 
                                .get_comments(c.db_repo.repo_id,
 
                                              revision=changeset.raw_id))
 

	
 
                # Status change comments - mostly from pull requests
 
                comments.update((st.changeset_comment_id, st.comment)
 
                                for st in ChangesetStatusModel()
 
                                .get_statuses(c.db_repo.repo_id,
 
                                              changeset.raw_id, with_revisions=True))
 

	
 
                inlines = ChangesetCommentsModel()\
 
                            .get_inline_comments(c.db_repo.repo_id,
 
                                                 revision=changeset.raw_id)
 
                c.inline_comments.extend(inlines)
 

	
 
            c.changes[changeset.raw_id] = []
 

	
 
            cs2 = changeset.raw_id
 
            cs1 = changeset.parents[0].raw_id if changeset.parents else EmptyChangeset().raw_id
 
            context_lcl = get_line_ctx('', request.GET)
 
            ign_whitespace_lcl = ign_whitespace_lcl = get_ignore_ws('', request.GET)
 

	
 
            _diff = c.db_repo_scm_instance.get_diff(cs1, cs2,
 
                ignore_whitespace=ign_whitespace_lcl, context=context_lcl)
 
            diff_limit = self.cut_off_limit if not fulldiff else None
 
            diff_processor = diffs.DiffProcessor(_diff,
 
                                                 vcs=c.db_repo_scm_instance.alias,
 
                                                 format='gitdiff',
 
                                                 diff_limit=diff_limit)
 
            cs_changes = OrderedDict()
 
            if method == 'show':
 
                _parsed = diff_processor.prepare()
 
                c.limited_diff = False
 
                if isinstance(_parsed, LimitedDiffContainer):
 
                    c.limited_diff = True
 
                for f in _parsed:
 
                    st = f['stats']
 
                    c.lines_added += st['added']
 
                    c.lines_deleted += st['deleted']
 
                    fid = h.FID(changeset.raw_id, f['filename'])
 
                    diff = diff_processor.as_html(enable_comments=enable_comments,
 
                                                  parsed_lines=[f])
 
                    cs_changes[fid] = [cs1, cs2, f['operation'], f['filename'],
 
                                       diff, st]
 
            else:
 
                # downloads/raw we only need RAW diff nothing else
 
                diff = diff_processor.as_raw()
 
                cs_changes[''] = [None, None, None, None, diff, None]
 
            c.changes[changeset.raw_id] = cs_changes
 

	
 
        #sort comments in creation order
 
        c.comments = [com for com_id, com in sorted(comments.items())]
 

	
 
        # count inline comments
 
        for __, lines in c.inline_comments:
 
            for comments in lines.values():
 
                c.inline_cnt += len(comments)
 

	
 
        if len(c.cs_ranges) == 1:
 
            c.changeset = c.cs_ranges[0]
 
            c.parent_tmpl = ''.join(['# Parent  %s\n' % x.raw_id
 
                                     for x in c.changeset.parents])
 
        if method == 'download':
 
            response.content_type = 'text/plain'
 
            response.content_disposition = 'attachment; filename=%s.diff' \
 
                                            % revision[:12]
 
            return diff
 
        elif method == 'patch':
 
            response.content_type = 'text/plain'
 
            c.diff = safe_unicode(diff)
 
            return render('changeset/patch_changeset.html')
 
        elif method == 'raw':
 
            response.content_type = 'text/plain'
 
            return diff
 
        elif method == 'show':
 
            self.__load_data()
 
            if len(c.cs_ranges) == 1:
 
                return render('changeset/changeset.html')
 
            else:
 
                c.cs_ranges_org = None
 
                c.cs_comments = {}
 
                revs = [ctx.revision for ctx in reversed(c.cs_ranges)]
 
                c.jsdata = json.dumps(graph_data(c.db_repo_scm_instance, revs))
 
                return render('changeset/changeset_range.html')
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def index(self, revision, method='show'):
 
        return self._index(revision, method=method)
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def changeset_raw(self, revision):
 
        return self._index(revision, method='raw')
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def changeset_patch(self, revision):
 
        return self._index(revision, method='patch')
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def changeset_download(self, revision):
 
        return self._index(revision, method='download')
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def comment(self, repo_name, revision):
 
        status = request.POST.get('changeset_status')
 
        text = request.POST.get('text', '').strip() or _('No comments.')
 

	
 
        c.co = comm = ChangesetCommentsModel().create(
 
            text=text,
 
            repo=c.db_repo.repo_id,
 
            user=c.authuser.user_id,
 
            revision=revision,
 
            f_path=request.POST.get('f_path'),
 
            line_no=request.POST.get('line'),
 
            status_change=(ChangesetStatus.get_status_lbl(status)
 
                           if status else None)
 
        )
 

	
 
        # get status if set !
 
        if status:
 
            # if latest status was from pull request and it's closed
 
            # disallow changing status !
 
            # dont_allow_on_closed_pull_request = True !
 

	
 
            try:
 
                ChangesetStatusModel().set_status(
 
                    c.db_repo.repo_id,
 
                    status,
 
                    c.authuser.user_id,
 
                    comm,
 
                    revision=revision,
 
                    dont_allow_on_closed_pull_request=True
 
                )
 
            except StatusChangeOnClosedPullRequestError:
 
                log.error(traceback.format_exc())
 
                msg = _('Changing status on a changeset associated with '
 
                        'a closed pull request is not allowed')
 
                h.flash(msg, category='warning')
 
                return redirect(h.url('changeset_home', repo_name=repo_name,
 
                                      revision=revision))
 
        action_logger(self.authuser,
 
                      'user_commented_revision:%s' % revision,
 
                      c.db_repo, self.ip_addr, self.sa)
 

	
 
        Session().commit()
 

	
 
        if not request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            return redirect(h.url('changeset_home', repo_name=repo_name,
 
                                  revision=revision))
 
        #only ajax below
 
        data = {
 
           'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
 
        }
 
        if comm:
 
            data.update(comm.get_dict())
 
            data.update({'rendered_text':
 
                         render('changeset/changeset_comment_block.html')})
 

	
 
        return data
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def preview_comment(self):
 
        if not request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            raise HTTPBadRequest()
 
        text = request.POST.get('text')
 
        if text:
 
            return h.rst_w_mentions(text)
 
        return ''
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def delete_comment(self, repo_name, comment_id):
 
        co = ChangesetComment.get(comment_id)
 
        if not co:
 
            raise HTTPBadRequest()
 
        owner = co.author.user_id == c.authuser.user_id
 
        repo_admin = h.HasRepoPermissionAny('repository.admin')
 
        if h.HasPermissionAny('hg.admin')() or repo_admin or owner:
 
            ChangesetCommentsModel().delete(comment=co)
 
            Session().commit()
 
            return True
 
        else:
 
            raise HTTPForbidden()
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def changeset_info(self, repo_name, revision):
 
        if request.is_xhr:
 
            try:
 
                return c.db_repo_scm_instance.get_changeset(revision)
 
            except ChangesetDoesNotExistError, e:
 
                return EmptyChangeset(message=str(e))
 
        else:
 
            raise HTTPBadRequest()
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def changeset_children(self, repo_name, revision):
 
        if request.is_xhr:
 
            changeset = c.db_repo_scm_instance.get_changeset(revision)
 
            result = {"results": []}
 
            if changeset.children:
 
                result = {"results": changeset.children}
 
            return result
 
        else:
 
            raise HTTPBadRequest()
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def changeset_parents(self, repo_name, revision):
 
        if request.is_xhr:
 
            changeset = c.db_repo_scm_instance.get_changeset(revision)
 
            result = {"results": []}
 
            if changeset.parents:
 
                result = {"results": changeset.parents}
 
            return result
 
        else:
 
            raise HTTPBadRequest()
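
A small sketch of the GET encoding that get_ignore_ws and get_line_ctx above parse; 'FID123' is just a stand-in for the file id h.FID(revision, path) would return:

from webob.multidict import MultiDict

# per-file options: ignore whitespace (WS:1) and six lines of context (C:6)
GET = MultiDict([('FID123', 'WS:1'), ('FID123', 'C:6')])
print get_ignore_ws('FID123', GET)   # -> 1
print get_line_ctx('FID123', GET)    # -> 6
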
kallithea/controllers/error.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.error
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Kallithea error controller
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Dec 8, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import os
 
import cgi
 
import logging
 
import paste.fileapp
 

	
 
from pylons import tmpl_context as c, request, config
 
from pylons.i18n.translation import _
 
from pylons.middleware import  media_path
 

	
 
from kallithea.lib.base import BaseController, render
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class ErrorController(BaseController):
 
    """Generates error documents as and when they are required.
 

	
 
    The ErrorDocuments middleware forwards to ErrorController when error
 
    related status codes are returned from the application.
 

	
 
    This behavior can be altered by changing the parameters to the
 
    ErrorDocuments middleware in your config/middleware.py file.
 
    """
 

	
 
    def __before__(self):
 
        #disable all base actions since we don't need them here
 
        pass
 

	
 
    def document(self):
 
        resp = request.environ.get('pylons.original_response')
 
        c.site_name = config.get('title')
 

	
 
        log.debug('### %s ###' % resp.status)
 

	
 
        e = request.environ
 
        c.serv_p = r'%(protocol)s://%(host)s/' \
 
                                    % {'protocol': e.get('wsgi.url_scheme'),
 
                                       'host': e.get('HTTP_HOST'), }
 

	
 
        c.error_message = cgi.escape(request.GET.get('code', str(resp.status)))
 
        c.error_explanation = self.get_error_explanation(resp.status_int)
 

	
 
        return render('/errors/error_document.html')
 

	
 
    def img(self, id):
 
        """Serve Pylons' stock images"""
 
        return self._serve_file(os.path.join(media_path, 'img', id))
 

	
 
    def style(self, id):
 
        """Serve Pylons' stock stylesheets"""
 
        return self._serve_file(os.path.join(media_path, 'style', id))
 

	
 
    def _serve_file(self, path):
 
        """Call Paste's FileApp (a WSGI application) to serve the file
 
        at the specified path
 
        """
 
        fapp = paste.fileapp.FileApp(path)
 
        return fapp(request.environ, self.start_response)
 

	
 
    def get_error_explanation(self, code):
 
        """ get the error explanations of int codes
 
            [400, 401, 403, 404, 500]"""
 
        try:
 
            code = int(code)
 
-        except Exception:

+        except ValueError:
 
            code = 500
 

	
 
        if code == 400:
 
            return _('The request could not be understood by the server'
 
                     ' due to malformed syntax.')
 
        if code == 401:
 
            return _('Unauthorized access to resource')
 
        if code == 403:
 
            return _("You don't have permission to view this page")
 
        if code == 404:
 
            return _('The resource could not be found')
 
        if code == 500:
 
            return _('The server encountered an unexpected condition'
 
                     ' which prevented it from fulfilling the request.')
kallithea/controllers/journal.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.journal
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Journal controller for pylons
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Nov 21, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 

	
 
"""
 

	
 
import logging
 
import traceback
 
from itertools import groupby
 

	
 
from sqlalchemy import or_
 
from sqlalchemy.orm import joinedload
 
from sqlalchemy.sql.expression import func
 

	
 
from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed
 

	
 
from webob.exc import HTTPBadRequest
 
from pylons import request, tmpl_context as c, response, url
 
from pylons.i18n.translation import _
 

	
 
from kallithea.controllers.admin.admin import _journal_filter
 
from kallithea.model.db import UserLog, UserFollowing, Repository, User
 
from kallithea.model.meta import Session
 
from kallithea.model.repo import RepoModel
 
import kallithea.lib.helpers as h
 
from kallithea.lib.helpers import Page
 
from kallithea.lib.auth import LoginRequired, NotAnonymous
 
from kallithea.lib.base import BaseController, render
 
from kallithea.lib.utils2 import safe_int, AttributeDict
 
from kallithea.lib.compat import json
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class JournalController(BaseController):
 

	
 
    def __before__(self):
 
        super(JournalController, self).__before__()
 
        self.language = 'en-us'
 
        self.ttl = "5"
 
        self.feed_nr = 20
 
        c.search_term = request.GET.get('filter')
 

	
 
    def _get_daily_aggregate(self, journal):
 
        groups = []
 
        for k, g in groupby(journal, lambda x: x.action_as_day):
 
            user_group = []
 
            #groupby username if it's a present value, else fallback to journal username
 
            for _unused, g2 in groupby(list(g), lambda x: x.user.username if x.user else x.username):
 
                l = list(g2)
 
                user_group.append((l[0].user, l))
 

	
 
            groups.append((k, user_group,))
 

	
 
        return groups
 

	
 
    def _get_journal_data(self, following_repos):
 
        repo_ids = [x.follows_repository.repo_id for x in following_repos
 
                    if x.follows_repository is not None]
 
        user_ids = [x.follows_user.user_id for x in following_repos
 
                    if x.follows_user is not None]
 

	
 
        filtering_criterion = None
 

	
 
        if repo_ids and user_ids:
 
            filtering_criterion = or_(UserLog.repository_id.in_(repo_ids),
 
                        UserLog.user_id.in_(user_ids))
 
        if repo_ids and not user_ids:
 
            filtering_criterion = UserLog.repository_id.in_(repo_ids)
 
        if not repo_ids and user_ids:
 
            filtering_criterion = UserLog.user_id.in_(user_ids)
 
        if filtering_criterion is not None:
 
            journal = self.sa.query(UserLog)\
 
                .options(joinedload(UserLog.user))\
 
                .options(joinedload(UserLog.repository))
 
            #filter
 
-            try:

-                journal = _journal_filter(journal, c.search_term)

-            except Exception:

-                # we want this to crash for now

-                raise

+            journal = _journal_filter(journal, c.search_term)
 
            journal = journal.filter(filtering_criterion)\
 
                        .order_by(UserLog.action_date.desc())
 
        else:
 
            journal = []
 

	
 
        return journal
 

	
 
    def _atom_feed(self, repos, public=True):
 
        journal = self._get_journal_data(repos)
 
        if public:
 
            _link = h.canonical_url('public_journal_atom')
 
            _desc = '%s %s %s' % (c.site_name, _('public journal'),
 
                                  'atom feed')
 
        else:
 
            _link = h.canonical_url('journal_atom')
 
            _desc = '%s %s %s' % (c.site_name, _('journal'), 'atom feed')
 

	
 
        feed = Atom1Feed(title=_desc,
 
                         link=_link,
 
                         description=_desc,
 
                         language=self.language,
 
                         ttl=self.ttl)
 

	
 
        for entry in journal[:self.feed_nr]:
 
            user = entry.user
 
            if user is None:
 
                #fix deleted users
 
                user = AttributeDict({'short_contact': entry.username,
 
                                      'email': '',
 
                                      'full_contact': ''})
 
            action, action_extra, ico = h.action_parser(entry, feed=True)
 
            title = "%s - %s %s" % (user.short_contact, action(),
 
                                    entry.repository.repo_name)
 
            desc = action_extra()
 
            _url = None
 
            if entry.repository is not None:
 
                _url = h.canonical_url('changelog_home',
 
                           repo_name=entry.repository.repo_name)
 

	
 
            feed.add_item(title=title,
 
                          pubdate=entry.action_date,
 
                          link=_url or h.canonical_url(''),
 
                          author_email=user.email,
 
                          author_name=user.full_contact,
 
                          description=desc)
 

	
 
        response.content_type = feed.mime_type
 
        return feed.writeString('utf-8')
 

	
 
    def _rss_feed(self, repos, public=True):
 
        journal = self._get_journal_data(repos)
 
        if public:
 
            _link = h.canonical_url('public_journal_atom')
 
            _desc = '%s %s %s' % (c.site_name, _('public journal'),
 
                                  'rss feed')
 
        else:
 
            _link = h.canonical_url('journal_atom')
 
            _desc = '%s %s %s' % (c.site_name, _('journal'), 'rss feed')
 

	
 
        feed = Rss201rev2Feed(title=_desc,
 
                         link=_link,
 
                         description=_desc,
 
                         language=self.language,
 
                         ttl=self.ttl)
 

	
 
        for entry in journal[:self.feed_nr]:
 
            user = entry.user
 
            if user is None:
 
                #fix deleted users
 
                user = AttributeDict({'short_contact': entry.username,
 
                                      'email': '',
 
                                      'full_contact': ''})
 
            action, action_extra, ico = h.action_parser(entry, feed=True)
 
            title = "%s - %s %s" % (user.short_contact, action(),
 
                                    entry.repository.repo_name)
 
            desc = action_extra()
 
            _url = None
 
            if entry.repository is not None:
 
                _url = h.canonical_url('changelog_home',
 
                           repo_name=entry.repository.repo_name)
 

	
 
            feed.add_item(title=title,
 
                          pubdate=entry.action_date,
 
                          link=_url or h.canonical_url(''),
 
                          author_email=user.email,
 
                          author_name=user.full_contact,
 
                          description=desc)
 

	
 
        response.content_type = feed.mime_type
 
        return feed.writeString('utf-8')
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    def index(self):
 
        # Return a rendered template
 
        p = safe_int(request.GET.get('page', 1), 1)
 
        c.user = User.get(self.authuser.user_id)
 
        c.following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 

	
 
        journal = self._get_journal_data(c.following)
 

	
 
        def url_generator(**kw):
 
            return url.current(filter=c.search_term, **kw)
 

	
 
        c.journal_pager = Page(journal, page=p, items_per_page=20, url=url_generator)
 
        c.journal_day_aggreagate = self._get_daily_aggregate(c.journal_pager)
 

	
 
        c.journal_data = render('journal/journal_data.html')
 
        if request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            return c.journal_data
 

	
 
        repos_list = Session().query(Repository)\
 
                     .filter(Repository.user_id ==
 
                             self.authuser.user_id)\
 
                     .order_by(func.lower(Repository.repo_name)).all()
 

	
 
        repos_data = RepoModel().get_repos_as_dict(repos_list=repos_list,
 
                                                   admin=True)
 
        #json used to render the grid
 
        c.data = json.dumps(repos_data)
 

	
 
        watched_repos_data = []
 

	
 
        ## watched repos
 
        _render = RepoModel._render_datatable
 

	
 
        def quick_menu(repo_name):
 
            return _render('quick_menu', repo_name)
 

	
 
        def repo_lnk(name, rtype, rstate, private, fork_of):
 
            return _render('repo_name', name, rtype, rstate, private, fork_of,
 
                           short_name=False, admin=False)
 

	
 
        def last_rev(repo_name, cs_cache):
 
            return _render('revision', repo_name, cs_cache.get('revision'),
 
                           cs_cache.get('raw_id'), cs_cache.get('author'),
 
                           cs_cache.get('message'))
 

	
 
        def desc(desc):
 
            from pylons import tmpl_context as c
 
            if c.visual.stylify_metatags:
 
                return h.urlify_text(h.desc_stylize(h.truncate(desc, 60)))
 
            else:
 
                return h.urlify_text(h.truncate(desc, 60))
 

	
 
        def repo_actions(repo_name):
 
            return _render('repo_actions', repo_name)
 

	
 
        def owner_actions(user_id, username):
 
            return _render('user_name', user_id, username)
 

	
 
        def toogle_follow(repo_id):
 
            return  _render('toggle_follow', repo_id)
 

	
 
        for entry in c.following:
 
            repo = entry.follows_repository
 
            cs_cache = repo.changeset_cache
 
            row = {
 
                "menu": quick_menu(repo.repo_name),
 
                "raw_name": repo.repo_name.lower(),
 
                "name": repo_lnk(repo.repo_name, repo.repo_type,
 
                                 repo.repo_state, repo.private, repo.fork),
 
                "last_changeset": last_rev(repo.repo_name, cs_cache),
 
                "last_rev_raw": cs_cache.get('revision'),
 
                "action": toogle_follow(repo.repo_id)
 
            }
 

	
 
            watched_repos_data.append(row)
 

	
 
        c.watched_data = json.dumps({
 
            "totalRecords": len(c.following),
 
            "startIndex": 0,
 
            "sort": "name",
 
            "dir": "asc",
 
            "records": watched_repos_data
 
        })
 
        return render('journal/journal.html')
 

	
 
    @LoginRequired(api_access=True)
 
    @NotAnonymous()
 
    def journal_atom(self):
 
        """
 
        Produce an atom-1.0 feed via feedgenerator module
 
        """
 
        following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 
        return self._atom_feed(following, public=False)
 

	
 
    @LoginRequired(api_access=True)
 
    @NotAnonymous()
 
    def journal_rss(self):
 
        """
 
        Produce an rss feed via feedgenerator module
 
        """
 
        following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 
        return self._rss_feed(following, public=False)
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    def toggle_following(self):
 
        cur_token = request.POST.get('auth_token')
 
        token = h.get_token()
 
        if cur_token == token:
 

	
 
            user_id = request.POST.get('follows_user_id')
 
            if user_id:
 
                try:
 
                    self.scm_model.toggle_following_user(user_id,
 
                                                self.authuser.user_id)
 
                    Session.commit()
 
                    return 'ok'
 
                except Exception:
 
                    log.error(traceback.format_exc())
 
                    raise HTTPBadRequest()
 

	
 
            repo_id = request.POST.get('follows_repo_id')
 
            if repo_id:
 
                try:
 
                    self.scm_model.toggle_following_repo(repo_id,
 
                                                self.authuser.user_id)
 
                    Session.commit()
 
                    return 'ok'
 
                except Exception:
 
                    log.error(traceback.format_exc())
 
                    raise HTTPBadRequest()
 

	
 
        log.debug('token mismatch %s vs %s' % (cur_token, token))
 
        raise HTTPBadRequest()
 

	
 
    @LoginRequired()
 
    def public_journal(self):
 
        # Return a rendered template
 
        p = safe_int(request.GET.get('page', 1), 1)
 

	
 
        c.following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 

	
 
        journal = self._get_journal_data(c.following)
 

	
 
        c.journal_pager = Page(journal, page=p, items_per_page=20)
 

	
 
        c.journal_day_aggreagate = self._get_daily_aggregate(c.journal_pager)
 

	
 
        c.journal_data = render('journal/journal_data.html')
 
        if request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            return c.journal_data
 
        return render('journal/public_journal.html')
 

	
 
    @LoginRequired(api_access=True)
 
    def public_journal_atom(self):
 
        """
 
        Produce an atom-1.0 feed via feedgenerator module
 
        """
 
        c.following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 

	
 
        return self._atom_feed(c.following)
 

	
 
    @LoginRequired(api_access=True)
 
    def public_journal_rss(self):
 
        """
 
        Produce an rss2 feed via feedgenerator module
 
        """
 
        c.following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 

	
 
        return self._rss_feed(c.following)
kallithea/controllers/summary.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.summary
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Summary controller for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 18, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import traceback
 
import calendar
 
import logging
 
from time import mktime
 
from datetime import timedelta, date
 

	
 
from pylons import tmpl_context as c, request
 
from pylons.i18n.translation import _
 
from webob.exc import HTTPBadRequest
 

	
 
from beaker.cache import cache_region, region_invalidate
 

	
 
from kallithea.lib.compat import product
 
from kallithea.lib.vcs.exceptions import ChangesetError, EmptyRepositoryError, \
 
    NodeDoesNotExistError
 
from kallithea.config.conf import ALL_READMES, ALL_EXTS, LANGUAGES_EXTENSIONS_MAP
 
from kallithea.model.db import Statistics, CacheInvalidation, User
 
from kallithea.lib.utils import jsonify
 
from kallithea.lib.utils2 import safe_str
 
from kallithea.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator,\
 
    NotAnonymous
 
from kallithea.lib.base import BaseRepoController, render
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 
from kallithea.lib.markup_renderer import MarkupRenderer
 
from kallithea.lib.celerylib import run_task
 
from kallithea.lib.celerylib.tasks import get_commits_stats
 
from kallithea.lib.compat import json
 
from kallithea.lib.vcs.nodes import FileNode
 
from kallithea.controllers.changelog import _load_changelog_summary
 

	
 
log = logging.getLogger(__name__)
 

	
 
README_FILES = [''.join([x[0][0], x[1][0]]) for x in
 
                    sorted(list(product(ALL_READMES, ALL_EXTS)),
 
                           key=lambda y:y[0][1] + y[1][1])]
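# Illustrative sketch (assuming ALL_READMES holds (basename, priority) pairs
# such as ('readme', 0) and ALL_EXTS holds (extension, priority) pairs such
# as ('.rst', 1)); the comprehension above yields candidate file names
# ordered by combined priority:
#
#     >>> from itertools import product
#     >>> readmes, exts = [('readme', 0), ('README', 0)], [('.rst', 1), ('.md', 1)]
#     >>> [''.join([x[0][0], x[1][0]]) for x in
#     ...  sorted(product(readmes, exts), key=lambda y: y[0][1] + y[1][1])]
#     ['readme.rst', 'readme.md', 'README.rst', 'README.md']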
 

	
 

	
 
class SummaryController(BaseRepoController):
 

	
 
    def __before__(self):
 
        super(SummaryController, self).__before__()
 

	
 
    def _get_download_links(self, repo):
 

	
 
        download_l = []
 

	
 
        branches_group = ([], _("Branches"))
 
        tags_group = ([], _("Tags"))
 

	
 
        for name, chs in c.db_repo_scm_instance.branches.items():
 
            #chs = chs.split(':')[-1]
 
            branches_group[0].append((chs, name),)
 
        download_l.append(branches_group)
 

	
 
        for name, chs in c.db_repo_scm_instance.tags.items():
 
            #chs = chs.split(':')[-1]
 
            tags_group[0].append((chs, name),)
 
        download_l.append(tags_group)
 

	
 
        return download_l
 

	
 
    def __get_readme_data(self, db_repo):
 
        repo_name = db_repo.repo_name
 
        log.debug('Looking for README file')
 

	
 
        @cache_region('long_term')
 
        def _get_readme_from_cache(key, kind):
 
            readme_data = None
 
            readme_file = None
 
            try:
 
                # get the landing revision, or tip if that fails
 
                cs = db_repo.get_landing_changeset()
 
                if isinstance(cs, EmptyChangeset):
 
                    raise EmptyRepositoryError()
 
                renderer = MarkupRenderer()
 
                for f in README_FILES:
 
                    try:
 
                        readme = cs.get_node(f)
 
                        if not isinstance(readme, FileNode):
 
                            continue
 
                        readme_file = f
 
                        log.debug('Found README file `%s` rendering...' %
 
                                  readme_file)
 
                        readme_data = renderer.render(readme.content,
 
                                                      filename=f)
 
                        break
 
                    except NodeDoesNotExistError:
 
                        continue
 
            except ChangesetError:
 
                log.error(traceback.format_exc())
 
 
            except EmptyRepositoryError:
 
                pass
 
            except Exception:
 
                log.error(traceback.format_exc())
 

	
 
            return readme_data, readme_file
 

	
 
        kind = 'README'
 
        valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
 
        if not valid:
 
            region_invalidate(_get_readme_from_cache, None, repo_name, kind)
 
        return _get_readme_from_cache(repo_name, kind)
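    # A sketch of the caching pattern used above: the rendered README is kept
    # in the beaker 'long_term' region, keyed by (repo_name, kind). When
    # CacheInvalidation reports the key as no longer valid, region_invalidate()
    # drops the cached entry, so the next call to _get_readme_from_cache()
    # renders the README afresh from the repository.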
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def index(self, repo_name):
 
        _load_changelog_summary()
 

	
 
        username = ''
 
        if self.authuser.username != User.DEFAULT_USER:
 
            username = safe_str(self.authuser.username)
 

	
 
        _def_clone_uri = _def_clone_uri_by_id = c.clone_uri_tmpl
 
        if '{repo}' in _def_clone_uri:
 
            _def_clone_uri_by_id = _def_clone_uri.replace('{repo}', '_{repoid}')
 
        elif '{repoid}' in _def_clone_uri:
 
            _def_clone_uri_by_id = _def_clone_uri.replace('_{repoid}', '{repo}')
 

	
 
        c.clone_repo_url = c.db_repo.clone_url(user=username,
 
                                                uri_tmpl=_def_clone_uri)
 
        c.clone_repo_url_id = c.db_repo.clone_url(user=username,
 
                                                uri_tmpl=_def_clone_uri_by_id)
 

	
 
        if c.db_repo.enable_statistics:
 
            c.show_stats = True
 
        else:
 
            c.show_stats = False
 

	
 
        stats = self.sa.query(Statistics)\
 
            .filter(Statistics.repository == c.db_repo)\
 
            .scalar()
 

	
 
        c.stats_percentage = 0
 

	
 
        if stats and stats.languages:
 
            c.no_data = False is c.db_repo.enable_statistics
 
            lang_stats_d = json.loads(stats.languages)
 

	
 
            lang_stats = ((x, {"count": y,
 
                               "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
 
                          for x, y in lang_stats_d.items())
 

	
 
            c.trending_languages = json.dumps(
 
                sorted(lang_stats, reverse=True, key=lambda k: k[1])[:10]
 
            )
 
        else:
 
            c.no_data = True
 
            c.trending_languages = json.dumps({})
 

	
 
        c.enable_downloads = c.db_repo.enable_downloads
 
        c.readme_data, c.readme_file = \
 
            self.__get_readme_data(c.db_repo)
 
        return render('summary/summary.html')
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    @jsonify
 
    def repo_size(self, repo_name):
 
        if request.is_xhr:
 
            return c.db_repo._repo_size()
 
        else:
 
            raise HTTPBadRequest()
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def statistics(self, repo_name):
 
        if c.db_repo.enable_statistics:
 
            c.show_stats = True
 
            c.no_data_msg = _('No data loaded yet')
 
        else:
 
            c.show_stats = False
 
            c.no_data_msg = _('Statistics are disabled for this repository')
 

	
 
        td = date.today() + timedelta(days=1)
 
        td_1m = td - timedelta(days=calendar.mdays[td.month])
 
        td_1y = td - timedelta(days=365)
 

	
 
        ts_min_m = mktime(td_1m.timetuple())
 
        ts_min_y = mktime(td_1y.timetuple())
 
        ts_max_y = mktime(td.timetuple())
 
        c.ts_min = ts_min_m
 
        c.ts_max = ts_max_y
 

	
 
        stats = self.sa.query(Statistics)\
 
            .filter(Statistics.repository == c.db_repo)\
 
            .scalar()
 
        if stats and stats.languages:
 
            c.no_data = False is c.db_repo.enable_statistics
 
            lang_stats_d = json.loads(stats.languages)
 
            c.commit_data = stats.commit_activity
 
            c.overview_data = stats.commit_activity_combined
 

	
 
            lang_stats = ((x, {"count": y,
 
                               "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
 
                          for x, y in lang_stats_d.items())
 

	
 
            c.trending_languages = json.dumps(
 
                sorted(lang_stats, reverse=True, key=lambda k: k[1])[:10]
 
            )
 
            last_rev = stats.stat_on_revision + 1
 
            c.repo_last_rev = c.db_repo_scm_instance.count()\
 
                if c.db_repo_scm_instance.revisions else 0
 
            if last_rev == 0 or c.repo_last_rev == 0:
 
                pass
 
            else:
 
                c.stats_percentage = '%.2f' % ((float((last_rev)) /
 
                                                c.repo_last_rev) * 100)
 
        else:
 
            c.commit_data = json.dumps({})
 
            c.overview_data = json.dumps([[ts_min_y, 0], [ts_max_y, 10]])
 
            c.trending_languages = json.dumps({})
 
            c.no_data = True
 

	
 
        recurse_limit = 500  # don't recurse more than 500 times when parsing
 
        run_task(get_commits_stats, c.db_repo.repo_name, ts_min_y,
 
                 ts_max_y, recurse_limit)
 
        return render('summary/statistics.html')
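    # Worked example for the progress value above (a sketch): with
    # stats.stat_on_revision == 149 and a repository of 200 changesets,
    # last_rev becomes 150 and c.stats_percentage is
    # '%.2f' % (150.0 / 200 * 100), i.e. '75.00'.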
kallithea/lib/auth.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.auth
 
~~~~~~~~~~~~~~~~~~
 

	
 
authentication and permission libraries
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 4, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 
from __future__ import with_statement
 
import time
 
import random
 
import logging
 
import traceback
 
import hashlib
 
import itertools
 
import collections
 

	
 
from tempfile import _RandomNameSequence
 
from decorator import decorator
 

	
 
from pylons import url, request
 
from pylons.controllers.util import abort, redirect
 
from pylons.i18n.translation import _
 
from sqlalchemy import or_
 
from sqlalchemy.orm.exc import ObjectDeletedError
 
from sqlalchemy.orm import joinedload
 

	
 
from kallithea import __platform__, is_windows, is_unix
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.model import meta
 
from kallithea.model.meta import Session
 
from kallithea.model.user import UserModel
 
from kallithea.model.db import User, Repository, Permission, \
 
    UserToPerm, UserGroupRepoToPerm, UserGroupToPerm, UserGroupMember, \
 
    RepoGroup, UserGroupRepoGroupToPerm, UserIpMap, UserGroupUserGroupToPerm, \
 
    UserGroup, UserApiKeys
 

	
 
from kallithea.lib.utils2 import safe_unicode, aslist
 
from kallithea.lib.utils import get_repo_slug, get_repo_group_slug, \
 
    get_user_group_slug, conditional_cache
 
from kallithea.lib.caching_query import FromCache
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class PasswordGenerator(object):
 
    """
 
    This is a simple class for generating passwords from different sets of
 
    characters
 
    usage::
 

	
 
        passwd_gen = PasswordGenerator()
 
        # print 8-letter password containing only big and small letters
        # of alphabet
 
        passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
 
    """
 
    ALPHABETS_NUM = r'''1234567890'''
 
    ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
 
    ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
 
    ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
 
    ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
 
        + ALPHABETS_NUM + ALPHABETS_SPECIAL
 
    ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
 
    ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
 
    ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
 
    ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM
 

	
 
    def __init__(self, passwd=''):
 
        self.passwd = passwd
 

	
 
    def gen_password(self, length, type_=None):
 
        if type_ is None:
 
            type_ = self.ALPHABETS_FULL
 
        self.passwd = ''.join([random.choice(type_) for _ in xrange(length)])
 
        return self.passwd
 

	
 

	
 
class KallitheaCrypto(object):
 

	
 
    @classmethod
 
    def hash_string(cls, str_):
 
        """
 
        Cryptographic function used for password hashing, based on bcrypt
        on Unix and sha256 (hashlib) on Windows
 

	
 
        :param str_: password to hash
 
        """
 
        if is_windows:
 
            from hashlib import sha256
 
            return sha256(str_).hexdigest()
 
        elif is_unix:
 
            import bcrypt
 
            return bcrypt.hashpw(str_, bcrypt.gensalt(10))
 
        else:
 
            raise Exception('Unknown or unsupported platform %s' \
 
                            % __platform__)
 

	
 
    @classmethod
 
    def hash_check(cls, password, hashed):
 
        """
 
        Checks a password against its hashed value, using a different
        implementation depending on the platform
 

	
 
        :param password: password
 
        :param hashed: password in hashed form
 
        """
 

	
 
        if is_windows:
 
            from hashlib import sha256
 
            return sha256(password).hexdigest() == hashed
 
        elif is_unix:
 
            import bcrypt
 
            return bcrypt.hashpw(password, hashed) == hashed
 
        else:
 
            raise Exception('Unknown or unsupported platform %s' \
 
                            % __platform__)
 

	
 

	
 
def get_crypt_password(password):
 
    return KallitheaCrypto.hash_string(password)
 

	
 

	
 
def check_password(password, hashed):
 
    return KallitheaCrypto.hash_check(password, hashed)
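# Illustrative round trip (a sketch; on Unix this goes through bcrypt, on
# Windows through sha256, as implemented in KallitheaCrypto above):
#
#     >>> hashed = get_crypt_password('s3cret')
#     >>> check_password('s3cret', hashed)
#     True
#     >>> check_password('wrong', hashed)
#     False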
 

	
 

	
 
def generate_api_key(str_, salt=None):
 
    """
 
    Generates API KEY from given string
 

	
 
    :param str_:
 
    :param salt:
 
    """
 

	
 
    if salt is None:
 
        salt = _RandomNameSequence().next()
 

	
 
    return hashlib.sha1(str_ + salt).hexdigest()
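# Illustrative sketch: the key is the hex encoded SHA1 of the input plus a
# salt; with a fixed salt the result is deterministic, while the default
# random salt makes repeated calls (almost certainly) differ:
#
#     >>> generate_api_key('johndoe', salt='x') == generate_api_key('johndoe', salt='x')
#     True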
 

	
 

	
 
class CookieStoreWrapper(object):
 

	
 
    def __init__(self, cookie_store):
 
        self.cookie_store = cookie_store
 

	
 
    def __repr__(self):
 
        return 'CookieStore<%s>' % (self.cookie_store)
 

	
 
    def get(self, key, other=None):
 
        if isinstance(self.cookie_store, dict):
 
            return self.cookie_store.get(key, other)
 
        elif isinstance(self.cookie_store, AuthUser):
 
            return self.cookie_store.__dict__.get(key, other)
 

	
 

	
 

	
 
def _cached_perms_data(user_id, user_is_admin, user_inherit_default_permissions,
 
                       explicit, algo):
 
    RK = 'repositories'
 
    GK = 'repositories_groups'
 
    UK = 'user_groups'
 
    GLOBAL = 'global'
 
    PERM_WEIGHTS = Permission.PERM_WEIGHTS
 
    permissions = {RK: {}, GK: {}, UK: {}, GLOBAL: set()}
 

	
 
    def _choose_perm(new_perm, cur_perm):
 
        new_perm_val = PERM_WEIGHTS[new_perm]
 
        cur_perm_val = PERM_WEIGHTS[cur_perm]
 
        if algo == 'higherwin':
 
            if new_perm_val > cur_perm_val:
 
                return new_perm
 
            return cur_perm
 
        elif algo == 'lowerwin':
 
            if new_perm_val < cur_perm_val:
 
                return new_perm
 
            return cur_perm
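    # Illustrative sketch (hypothetical weights; the real values come from
    # Permission.PERM_WEIGHTS): if 'repository.read' weighs less than
    # 'repository.write', then
    #     _choose_perm('repository.write', 'repository.read')
    # returns 'repository.write' under 'higherwin' (keep the stronger
    # permission) and 'repository.read' under 'lowerwin' (keep the weaker one).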
 

	
 
    #======================================================================
 
    # fetch default permissions
 
    #======================================================================
 
    default_user = User.get_by_username('default', cache=True)
 
    default_user_id = default_user.user_id
 

	
 
    default_repo_perms = Permission.get_default_perms(default_user_id)
 
    default_repo_groups_perms = Permission.get_default_group_perms(default_user_id)
 
    default_user_group_perms = Permission.get_default_user_group_perms(default_user_id)
 

	
 
    if user_is_admin:
 
        #==================================================================
 
        # admin user has all default rights for repositories
 
        # and groups set to admin
 
        #==================================================================
 
        permissions[GLOBAL].add('hg.admin')
 
        permissions[GLOBAL].add('hg.create.write_on_repogroup.true')
 

	
 
        # repositories
 
        for perm in default_repo_perms:
 
            r_k = perm.UserRepoToPerm.repository.repo_name
 
            p = 'repository.admin'
 
            permissions[RK][r_k] = p
 

	
 
        # repository groups
 
        for perm in default_repo_groups_perms:
 
            rg_k = perm.UserRepoGroupToPerm.group.group_name
 
            p = 'group.admin'
 
            permissions[GK][rg_k] = p
 

	
 
        # user groups
 
        for perm in default_user_group_perms:
 
            u_k = perm.UserUserGroupToPerm.user_group.users_group_name
 
            p = 'usergroup.admin'
 
            permissions[UK][u_k] = p
 
        return permissions
 

	
 
    #==================================================================
 
    # SET DEFAULTS GLOBAL, REPOS, REPOSITORY GROUPS
 
    #==================================================================
 
    uid = user_id
 

	
 
    # default global permissions taken from the default user
 
    default_global_perms = UserToPerm.query()\
 
        .filter(UserToPerm.user_id == default_user_id)\
 
        .options(joinedload(UserToPerm.permission))
 

	
 
    for perm in default_global_perms:
 
        permissions[GLOBAL].add(perm.permission.permission_name)
 

	
 
    # defaults for repositories, taken from default user
 
    for perm in default_repo_perms:
 
        r_k = perm.UserRepoToPerm.repository.repo_name
 
        if perm.Repository.private and not (perm.Repository.user_id == uid):
 
            # disable defaults for private repos,
 
            p = 'repository.none'
 
        elif perm.Repository.user_id == uid:
 
            # set admin if owner
 
            p = 'repository.admin'
 
        else:
 
            p = perm.Permission.permission_name
 

	
 
        permissions[RK][r_k] = p
 

	
 
    # defaults for repository groups taken from default user permission
 
    # on given group
 
    for perm in default_repo_groups_perms:
 
        rg_k = perm.UserRepoGroupToPerm.group.group_name
 
        p = perm.Permission.permission_name
 
        permissions[GK][rg_k] = p
 

	
 
    # defaults for user groups taken from default user permission
 
    # on given user group
 
    for perm in default_user_group_perms:
 
        u_k = perm.UserUserGroupToPerm.user_group.users_group_name
 
        p = perm.Permission.permission_name
 
        permissions[UK][u_k] = p
 

	
 
    #======================================================================
 
    # !! OVERRIDE GLOBALS !! with user permissions if any found
 
    #======================================================================
 
    # those can be configured from groups or users explicitly
 
    _configurable = set([
 
        'hg.fork.none', 'hg.fork.repository',
 
        'hg.create.none', 'hg.create.repository',
 
        'hg.usergroup.create.false', 'hg.usergroup.create.true'
 
    ])
 

	
 
    # USER GROUPS comes first
 
    # user group global permissions
 
    user_perms_from_users_groups = Session().query(UserGroupToPerm)\
 
        .options(joinedload(UserGroupToPerm.permission))\
 
        .join((UserGroupMember, UserGroupToPerm.users_group_id ==
 
               UserGroupMember.users_group_id))\
 
        .filter(UserGroupMember.user_id == uid)\
 
        .order_by(UserGroupToPerm.users_group_id)\
 
        .all()
 
    # need to group here by groups since user can be in more than
 
    # one group
 
    _grouped = [[x, list(y)] for x, y in
 
                itertools.groupby(user_perms_from_users_groups,
 
                                  lambda x:x.users_group)]
 
    for gr, perms in _grouped:
 
        # since user can be in multiple groups iterate over them and
 
        # select the lowest permissions first (more explicit)
 
        ##TODO: do this^^
 
        if not gr.inherit_default_permissions:
 
            # NEED TO IGNORE all configurable permissions and
 
            # replace them with explicitly set
 
            permissions[GLOBAL] = permissions[GLOBAL]\
 
                                            .difference(_configurable)
 
        for perm in perms:
 
            permissions[GLOBAL].add(perm.permission.permission_name)
 

	
 
    # user specific global permissions
 
    user_perms = Session().query(UserToPerm)\
 
            .options(joinedload(UserToPerm.permission))\
 
            .filter(UserToPerm.user_id == uid).all()
 

	
 
    if not user_inherit_default_permissions:
 
        # NEED TO IGNORE all configurable permissions and
 
        # replace them with explicitly set
 
        permissions[GLOBAL] = permissions[GLOBAL]\
 
                                        .difference(_configurable)
 

	
 
        for perm in user_perms:
 
            permissions[GLOBAL].add(perm.permission.permission_name)
 
    ## END GLOBAL PERMISSIONS
 

	
 
    #======================================================================
 
    # !! PERMISSIONS FOR REPOSITORIES !!
 
    #======================================================================
 
    #======================================================================
 
    # check if user is part of user groups for this repository and
    # fill in the permissions from them. _choose_perm decides which
    # permission should be selected based on the chosen method
 
    #======================================================================
 

	
 
    # user group for repositories permissions
 
    user_repo_perms_from_users_groups = \
 
     Session().query(UserGroupRepoToPerm, Permission, Repository,)\
 
        .join((Repository, UserGroupRepoToPerm.repository_id ==
 
               Repository.repo_id))\
 
        .join((Permission, UserGroupRepoToPerm.permission_id ==
 
               Permission.permission_id))\
 
        .join((UserGroupMember, UserGroupRepoToPerm.users_group_id ==
 
               UserGroupMember.users_group_id))\
 
        .filter(UserGroupMember.user_id == uid)\
 
        .all()
 

	
 
    multiple_counter = collections.defaultdict(int)
 
    for perm in user_repo_perms_from_users_groups:
 
        r_k = perm.UserGroupRepoToPerm.repository.repo_name
 
        multiple_counter[r_k] += 1
 
        p = perm.Permission.permission_name
 
        cur_perm = permissions[RK][r_k]
 

	
 
        if perm.Repository.user_id == uid:
 
            # set admin if owner
 
            p = 'repository.admin'
 
        else:
 
            if multiple_counter[r_k] > 1:
 
                p = _choose_perm(p, cur_perm)
 
        permissions[RK][r_k] = p
 

	
 
    # user explicit permissions for repositories, overrides any specified
 
    # by the group permission
 
    user_repo_perms = Permission.get_default_perms(uid)
 
    for perm in user_repo_perms:
 
        r_k = perm.UserRepoToPerm.repository.repo_name
 
        cur_perm = permissions[RK][r_k]
 
        # set admin if owner
 
        if perm.Repository.user_id == uid:
 
            p = 'repository.admin'
 
        else:
 
            p = perm.Permission.permission_name
 
            if not explicit:
 
                p = _choose_perm(p, cur_perm)
 
        permissions[RK][r_k] = p
 

	
 
    #======================================================================
 
    # !! PERMISSIONS FOR REPOSITORY GROUPS !!
 
    #======================================================================
 
    #======================================================================
 
    # check if user is part of user groups for this repository groups and
 
    # fill in his permission from it. _choose_perm decides of which
 
    # permission should be selected based on selected method
 
    #======================================================================
 
    # user group for repo groups permissions
 
    user_repo_group_perms_from_users_groups = \
 
     Session().query(UserGroupRepoGroupToPerm, Permission, RepoGroup)\
 
     .join((RepoGroup, UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id))\
 
     .join((Permission, UserGroupRepoGroupToPerm.permission_id
 
            == Permission.permission_id))\
 
     .join((UserGroupMember, UserGroupRepoGroupToPerm.users_group_id
 
            == UserGroupMember.users_group_id))\
 
     .filter(UserGroupMember.user_id == uid)\
 
     .all()
 

	
 
    multiple_counter = collections.defaultdict(int)
 
    for perm in user_repo_group_perms_from_users_groups:
 
        g_k = perm.UserGroupRepoGroupToPerm.group.group_name
 
        multiple_counter[g_k] += 1
 
        p = perm.Permission.permission_name
 
        cur_perm = permissions[GK][g_k]
 
        if multiple_counter[g_k] > 1:
 
            p = _choose_perm(p, cur_perm)
 
        permissions[GK][g_k] = p
 

	
 
    # user explicit permissions for repository groups
 
    user_repo_groups_perms = Permission.get_default_group_perms(uid)
 
    for perm in user_repo_groups_perms:
 
        rg_k = perm.UserRepoGroupToPerm.group.group_name
 
        p = perm.Permission.permission_name
 
        cur_perm = permissions[GK][rg_k]
 
        if not explicit:
 
            p = _choose_perm(p, cur_perm)
 
        permissions[GK][rg_k] = p
 

	
 
    #======================================================================
 
    # !! PERMISSIONS FOR USER GROUPS !!
 
    #======================================================================
 
    # user group for user group permissions
 
    user_group_user_groups_perms = \
 
     Session().query(UserGroupUserGroupToPerm, Permission, UserGroup)\
 
     .join((UserGroup, UserGroupUserGroupToPerm.target_user_group_id
 
            == UserGroup.users_group_id))\
 
     .join((Permission, UserGroupUserGroupToPerm.permission_id
 
            == Permission.permission_id))\
 
     .join((UserGroupMember, UserGroupUserGroupToPerm.user_group_id
 
            == UserGroupMember.users_group_id))\
 
     .filter(UserGroupMember.user_id == uid)\
 
     .all()
 

	
 
    multiple_counter = collections.defaultdict(int)
 
    for perm in user_group_user_groups_perms:
 
        g_k = perm.UserGroupUserGroupToPerm.target_user_group.users_group_name
 
        multiple_counter[g_k] += 1
 
        p = perm.Permission.permission_name
 
        cur_perm = permissions[UK][g_k]
 
        if multiple_counter[g_k] > 1:
 
            p = _choose_perm(p, cur_perm)
 
        permissions[UK][g_k] = p
 

	
 
    #user explicit permission for user groups
 
    user_user_groups_perms = Permission.get_default_user_group_perms(uid)
 
    for perm in user_user_groups_perms:
 
        u_k = perm.UserUserGroupToPerm.user_group.users_group_name
 
        p = perm.Permission.permission_name
 
        cur_perm = permissions[UK][u_k]
 
        if not explicit:
 
            p = _choose_perm(p, cur_perm)
 
        permissions[UK][u_k] = p
 

	
 
    return permissions
 

	
 

	
 
def allowed_api_access(controller_name, whitelist=None, api_key=None):
 
    """
 
    Check if the given controller_name is in the API access whitelist
 
    """
 
    if not whitelist:
 
        from kallithea import CONFIG
 
        whitelist = aslist(CONFIG.get('api_access_controllers_whitelist'),
 
                           sep=',')
 
        log.debug('whitelist of API access is: %s' % (whitelist))
 
    api_access_valid = controller_name in whitelist
 
    if api_access_valid:
 
        log.debug('controller:%s is in API whitelist' % (controller_name))
 
    else:
 
        msg = 'controller: %s is *NOT* in API whitelist' % (controller_name)
 
        if api_key:
 
            #if we use API key and don't have access it's a warning
 
            log.warning(msg)
 
        else:
 
            log.debug(msg)
 
    return api_access_valid
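# Illustrative sketch (the whitelist entries here are hypothetical; in
# practice they come from the api_access_controllers_whitelist setting in
# the .ini file):
#
#     >>> allowed_api_access('ChangesetController:changeset_raw',
#     ...                    whitelist=['ChangesetController:changeset_raw'])
#     True
#     >>> allowed_api_access('SummaryController:index',
#     ...                    whitelist=['ChangesetController:changeset_raw'])
#     False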
 

	
 

	
 
class AuthUser(object):
 
    """
 
    A simple object that handles all attributes of user in Kallithea
 

	
 
    It does a lookup based on API key, given user, or user present in the
    session. Then it fills all required information for such a user. It also
    checks if anonymous access is enabled and, if so, returns the default
    user as logged in
 
    """
 

	
 
    def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None):
 

	
 
        self.user_id = user_id
 
        self._api_key = api_key
 

	
 
        self.api_key = None
 
        self.username = username
 
        self.ip_addr = ip_addr
 
        self.name = ''
 
        self.lastname = ''
 
        self.email = ''
 
        self.is_authenticated = False
 
        self.admin = False
 
        self.inherit_default_permissions = False
 

	
 
        self.propagate_data()
 
        self._instance = None
 

	
 
    @LazyProperty
 
    def permissions(self):
 
        return self.get_perms(user=self, cache=False)
 

	
 
    @property
 
    def api_keys(self):
 
        return self.get_api_keys()
 

	
 
    def propagate_data(self):
 
        user_model = UserModel()
 
        self.anonymous_user = User.get_default_user(cache=True)
 
        is_user_loaded = False
 

	
 
        # lookup by userid
 
        if self.user_id is not None and self.user_id != self.anonymous_user.user_id:
 
            log.debug('Auth User lookup by USER ID %s' % self.user_id)
 
            is_user_loaded = user_model.fill_data(self, user_id=self.user_id)
 

	
 
        # try to get user by api key
 
        elif self._api_key and self._api_key != self.anonymous_user.api_key:
 
            log.debug('Auth User lookup by API KEY %s' % self._api_key)
 
            is_user_loaded = user_model.fill_data(self, api_key=self._api_key)
 

	
 
        # lookup by username
 
        elif self.username:
 
            log.debug('Auth User lookup by USER NAME %s' % self.username)
 
            is_user_loaded = user_model.fill_data(self, username=self.username)
 
        else:
 
            log.debug('No data in %s that could be used to log in' % self)
 

	
 
        if not is_user_loaded:
 
            # if we cannot authenticate user try anonymous
 
            if self.anonymous_user.active:
 
                user_model.fill_data(self, user_id=self.anonymous_user.user_id)
 
                # then we mark this user as logged in
 
                self.is_authenticated = True
 
            else:
 
                self.user_id = None
 
                self.username = None
 
                self.is_authenticated = False
 

	
 
        if not self.username:
 
            self.username = 'None'
 

	
 
        log.debug('Auth User is now %s' % self)
 

	
 
    def get_perms(self, user, explicit=True, algo='higherwin', cache=False):
 
        """
 
        Fills the user permission attribute with permissions taken from the
        database; works for permissions given for repositories and for
        permissions granted to groups
 

	
 
        :param user: instance of User object from database
 
        :param explicit: In case there are permissions both for the user and a
            group the user is part of, the explicit flag defines whether the
            user permission explicitly overrides the group permission; if it is
            False, the decision is made based on the algo
        :param algo: algorithm deciding which permission to choose when
            multiple are defined, e.g. the user is in two different groups. It
            also decides, when the explicit flag is turned off, how to resolve
            the case where the user is in a group and also has a separate
            permission defined
 
        """
 
        user_id = user.user_id
 
        user_is_admin = user.is_admin
 
        user_inherit_default_permissions = user.inherit_default_permissions
 

	
 
        log.debug('Getting PERMISSION tree')
 
        compute = conditional_cache('short_term', 'cache_desc',
 
                                    condition=cache, func=_cached_perms_data)
 
        return compute(user_id, user_is_admin,
 
                       user_inherit_default_permissions, explicit, algo)
 

	
 
    def get_api_keys(self):
 
        api_keys = [self.api_key]
 
        for api_key in UserApiKeys.query()\
 
                .filter(UserApiKeys.user_id == self.user_id)\
 
                .filter(or_(UserApiKeys.expires == -1,
 
                            UserApiKeys.expires >= time.time())).all():
 
            api_keys.append(api_key.api_key)
 

	
 
        return api_keys
 

	
 
    @property
 
    def is_admin(self):
 
        return self.admin
 

	
 
    @property
 
    def repositories_admin(self):
 
        """
 
        Returns list of repositories you're an admin of
 
        """
 
        return [x[0] for x in self.permissions['repositories'].iteritems()
 
                if x[1] == 'repository.admin']
 

	
 
    @property
 
    def repository_groups_admin(self):
 
        """
 
        Returns list of repository groups you're an admin of
 
        """
 
        return [x[0] for x in self.permissions['repositories_groups'].iteritems()
 
                if x[1] == 'group.admin']
 

	
 
    @property
 
    def user_groups_admin(self):
 
        """
 
        Returns list of user groups you're an admin of
 
        """
 
        return [x[0] for x in self.permissions['user_groups'].iteritems()
 
                if x[1] == 'usergroup.admin']
 

	
 
    @property
 
    def ip_allowed(self):
 
        """
 
        Checks if the ip_addr given in the constructor is in the list of
        allowed IP addresses defined for the user
 

	
 
        :returns: boolean, True if ip is in allowed ip range
 
        """
 
        # check IP
 
        inherit = self.inherit_default_permissions
 
        return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
 
                                         inherit_from_default=inherit)
 

	
 
    @classmethod
 
    def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
 
        allowed_ips = AuthUser.get_allowed_ips(user_id, cache=True,
 
                        inherit_from_default=inherit_from_default)
 
        if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
 
            log.debug('IP:%s is in range of %s' % (ip_addr, allowed_ips))
 
            return True
 
        else:
 
            log.info('Access for IP:%s forbidden, '
 
                     'not in %s' % (ip_addr, allowed_ips))
 
            return False
 

	
 
    def __repr__(self):
 
        return "<AuthUser('id:%s[%s] ip:%s auth:%s')>"\
 
            % (self.user_id, self.username, self.ip_addr, self.is_authenticated)
 

	
 
    def set_authenticated(self, authenticated=True):
 
        if self.user_id != self.anonymous_user.user_id:
 
            self.is_authenticated = authenticated
 

	
 
    def get_cookie_store(self):
 
        return {'username': self.username,
 
                'user_id': self.user_id,
 
                'is_authenticated': self.is_authenticated}
 

	
 
    @classmethod
 
    def from_cookie_store(cls, cookie_store):
 
        """
 
        Creates AuthUser from a cookie store
 

	
 
        :param cls:
 
        :param cookie_store:
 
        """
 
        user_id = cookie_store.get('user_id')
 
        username = cookie_store.get('username')
 
        api_key = cookie_store.get('api_key')
 
        return AuthUser(user_id, api_key, username)
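    # Illustrative round trip (a sketch): the dict produced by
    # get_cookie_store() can be stored in the session and fed back through
    # from_cookie_store() on a later request to rebuild an AuthUser for the
    # same account (the api_key is not stored in the cookie):
    #
    #     store = auth_user.get_cookie_store()
    #     restored = AuthUser.from_cookie_store(store)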
 

	
 
    @classmethod
 
    def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
 
        _set = set()
 

	
 
        if inherit_from_default:
 
            default_ips = UserIpMap.query().filter(UserIpMap.user ==
 
                                            User.get_default_user(cache=True))
 
            if cache:
 
                default_ips = default_ips.options(FromCache("sql_cache_short",
 
                                                  "get_user_ips_default"))
 

	
 
            # populate from default user
 
            for ip in default_ips:
 
                try:
 
                    _set.add(ip.ip_addr)
 
                except ObjectDeletedError:
 
                    # since we use heavy caching sometimes it happens that we get
 
                    # deleted objects here, we just skip them
 
                    pass
 

	
 
        user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
 
        if cache:
 
            user_ips = user_ips.options(FromCache("sql_cache_short",
 
                                                  "get_user_ips_%s" % user_id))
 

	
 
        for ip in user_ips:
 
            try:
 
                _set.add(ip.ip_addr)
 
            except ObjectDeletedError:
 
                # since we use heavy caching sometimes it happens that we get
 
                # deleted objects here, we just skip them
 
                pass
 
        return _set or set(['0.0.0.0/0', '::/0'])
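    # Illustrative note (a sketch): for a user with no UserIpMap entries (and
    # none inherited from the default user), the fallback set
    # set(['0.0.0.0/0', '::/0']) allows any IPv4 or IPv6 source address, so
    # check_ip_access() accepts every client for that user.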
 

	
 

	
 
def set_available_permissions(config):
 
    """
 
    This function populates the pylons config with all permissions defined in
    the database. We don't want to query the database for new permissions each
    time, since adding a new permission also requires an application restart,
    i.e. to decorate new views with the newly created permission
 

	
 
    :param config: current pylons config instance
 

	
 
    """
 
    log.info('getting information about all available permissions')
 
    try:
 
        sa = meta.Session
 
        all_perms = sa.query(Permission).all()
 
        config['available_permissions'] = [x.permission_name for x in all_perms]
 
    except Exception:
 
        log.error(traceback.format_exc())
 
    finally:
 
        meta.Session.remove()
 

	
 

	
 
#==============================================================================
 
# CHECK DECORATORS
 
#==============================================================================
 
class LoginRequired(object):
 
    """
 
    Must be logged in to execute this function, else
    redirect to the login page
 

	
 
    :param api_access: if enabled this checks only for valid auth token
 
        and grants access based on valid token
 
    """
 

	
 
    def __init__(self, api_access=False):
 
        self.api_access = api_access
 

	
 
    def __call__(self, func):
 
        return decorator(self.__wrapper, func)
 

	
 
    def __wrapper(self, func, *fargs, **fkwargs):
 
        cls = fargs[0]
 
        user = cls.authuser
 
        loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
 

	
 
        # check if our IP is allowed
 
        ip_access_valid = True
 
        if not user.ip_allowed:
 
            from kallithea.lib import helpers as h
 
            h.flash(h.literal(_('IP %s not allowed' % (user.ip_addr))),
 
                    category='warning')
 
            ip_access_valid = False
 

	
 
        # check if we used an APIKEY and it's a valid one
 
        # defined whitelist of controllers which API access will be enabled
 
        _api_key = request.GET.get('api_key', '')
 
        api_access_valid = allowed_api_access(loc, api_key=_api_key)
 

	
 
        # explicit controller is enabled or API is in our whitelist
 
        if self.api_access or api_access_valid:
 
            log.debug('Checking API KEY access for %s' % cls)
 
            if _api_key and _api_key in user.api_keys:
 
                api_access_valid = True
 
                log.debug('API KEY ****%s is VALID' % _api_key[-4:])
 
            else:
 
                api_access_valid = False
 
                if not _api_key:
 
                    log.debug("API KEY *NOT* present in request")
 
                else:
 
                    log.warning("API KEY ****%s *NOT* valid" % _api_key[-4:])
 

	
 
        log.debug('Checking if %s is authenticated @ %s' % (user.username, loc))
 
        reason = 'RegularAuth' if user.is_authenticated else 'APIAuth'
 

	
 
        if ip_access_valid and (user.is_authenticated or api_access_valid):
 
            log.info('user %s authenticating with:%s IS authenticated on func %s '
 
                     % (user, reason, loc)
 
            )
 
            return func(*fargs, **fkwargs)
 
        else:
 
            log.warning('user %s authenticating with:%s NOT authenticated on func: %s: '
 
                     'IP_ACCESS:%s API_ACCESS:%s'
 
                     % (user, reason, loc, ip_access_valid, api_access_valid)
 
            )
 
            p = url.current()
 

	
 
            log.debug('redirecting to login page with %s' % p)
 
            return redirect(url('login_home', came_from=p))
 

	
 

	
 
class NotAnonymous(object):
 
    """
 
    Must be logged in to execute this function, else
    redirect to the login page"""
 

	
 
    def __call__(self, func):
 
        return decorator(self.__wrapper, func)
 

	
 
    def __wrapper(self, func, *fargs, **fkwargs):
 
        cls = fargs[0]
 
        self.user = cls.authuser
 

	
 
        log.debug('Checking if user is not anonymous @%s' % cls)
 

	
 
        anonymous = self.user.username == User.DEFAULT_USER
 

	
 
        if anonymous:
 
            p = url.current()
 

	
 
            import kallithea.lib.helpers as h
 
            h.flash(_('You need to be a registered user to '
 
                      'perform this action'),
 
                    category='warning')
 
            return redirect(url('login_home', came_from=p))
 
        else:
 
            return func(*fargs, **fkwargs)
 

	
 

	
 
class PermsDecorator(object):
 
    """Base class for controller decorators"""
 

	
 
    def __init__(self, *required_perms):
 
        self.required_perms = set(required_perms)
 
        self.user_perms = None
 

	
 
    def __call__(self, func):
 
        return decorator(self.__wrapper, func)
 

	
 
    def __wrapper(self, func, *fargs, **fkwargs):
 
        cls = fargs[0]
 
        self.user = cls.authuser
 
        self.user_perms = self.user.permissions
 
        log.debug('checking %s permissions %s for %s %s',
 
           self.__class__.__name__, self.required_perms, cls, self.user)
 

	
 
        if self.check_permissions():
 
            log.debug('Permission granted for %s %s' % (cls, self.user))
 
            return func(*fargs, **fkwargs)
 

	
 
        else:
 
            log.debug('Permission denied for %s %s' % (cls, self.user))
 
            anonymous = self.user.username == User.DEFAULT_USER
 

	
 
            if anonymous:
 
                p = url.current()
 

	
 
                import kallithea.lib.helpers as h
 
                h.flash(_('You need to be signed in to '
 
                          'view this page'),
 
                        category='warning')
 
                return redirect(url('login_home', came_from=p))
 

	
 
            else:
 
                # redirect with forbidden ret code
 
                return abort(403)
 

	
 
    def check_permissions(self):
 
        """Dummy function for overriding"""
 
        raise Exception('You have to write this function in child class')
 

	
 

	
 
class HasPermissionAllDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for all given predicates. All of them
 
    have to be met in order to fulfill the request
 
    """
 

	
 
    def check_permissions(self):
 
        if self.required_perms.issubset(self.user_perms.get('global')):
 
            return True
 
        return False
 

	
 

	
 
class HasPermissionAnyDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for any of given predicates. In order to
 
    fulfill the request any of the predicates must be met
 
    """
 

	
 
    def check_permissions(self):
 
        if self.required_perms.intersection(self.user_perms.get('global')):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoPermissionAllDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for all given predicates for specific
 
    repository. All of them have to be met in order to fulfill the request
 
    """
 

	
 
    def check_permissions(self):
 
        repo_name = get_repo_slug(request)
 
        try:
 
            user_perms = set([self.user_perms['repositories'][repo_name]])
 
        except KeyError:
 
            return False
 
        if self.required_perms.issubset(user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoPermissionAnyDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for any of given predicates for specific
 
    repository. In order to fulfill the request any of the predicates must be met
 
    """
 

	
 
    def check_permissions(self):
 
        repo_name = get_repo_slug(request)
 
        try:
 
            user_perms = set([self.user_perms['repositories'][repo_name]])
 
        except KeyError:
 
            return False
 

	
 
        if self.required_perms.intersection(user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoGroupPermissionAllDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for all given predicates for specific
 
    repository group. All of them have to be met in order to fulfill the request
 
    """
 

	
 
    def check_permissions(self):
 
        group_name = get_repo_group_slug(request)
 
        try:
 
            user_perms = set([self.user_perms['repositories_groups'][group_name]])
 
        except KeyError:
 
            return False
 

	
 
        if self.required_perms.issubset(user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for any of given predicates for specific
 
    repository group. In order to fulfill the request any of the predicates must be met
 
    """
 

	
 
    def check_permissions(self):
 
        group_name = get_repo_group_slug(request)
 
        try:
 
            user_perms = set([self.user_perms['repositories_groups'][group_name]])
 
        except KeyError:
 
            return False
 

	
 
        if self.required_perms.intersection(user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasUserGroupPermissionAllDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for all given predicates for specific
 
    user group. All of them have to be met in order to fulfill the request
 
    """
 

	
 
    def check_permissions(self):
 
        group_name = get_user_group_slug(request)
 
        try:
 
            user_perms = set([self.user_perms['user_groups'][group_name]])
 
        except KeyError:
 
            return False
 

	
 
        if self.required_perms.issubset(user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasUserGroupPermissionAnyDecorator(PermsDecorator):
 
    """
 
    Checks for access permission for any of given predicates for specific
 
    user group. In order to fulfill the request any of the predicates must be met
 
    """
 

	
 
    def check_permissions(self):
 
        group_name = get_user_group_slug(request)
 
        try:
 
            user_perms = set([self.user_perms['user_groups'][group_name]])
 
        except KeyError:
 
            return False
 

	
 
        if self.required_perms.intersection(user_perms):
 
            return True
 
        return False
 

	
 

	
 
#==============================================================================
 
# CHECK FUNCTIONS
 
#==============================================================================
 
class PermsFunction(object):
 
    """Base function for other check functions"""
 

	
 
    def __init__(self, *perms):
 
        self.required_perms = set(perms)
 
        self.user_perms = None
 
        self.repo_name = None
 
        self.group_name = None
 

	
 
    def __call__(self, check_location='', user=None):
 
        if not user:
 
            #TODO: remove this someday, put user as an attribute here
 
            user = request.user
 

	
 
        # init auth user if not already given
 
        if not isinstance(user, AuthUser):
 
            user = AuthUser(user.user_id)
 

	
 
        cls_name = self.__class__.__name__
 
        check_scope = {
 
            'HasPermissionAll': '',
 
            'HasPermissionAny': '',
 
            'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
 
            'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
 
            'HasRepoGroupPermissionAll': 'group:%s' % self.group_name,
 
            'HasRepoGroupPermissionAny': 'group:%s' % self.group_name,
 
        }.get(cls_name, '?')
 
        log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
 
                  self.required_perms, user, check_scope,
 
                  check_location or 'unspecified location')
 
        if not user:
 
            log.debug('Empty request user')
 
            return False
 
        self.user_perms = user.permissions
 
        if self.check_permissions():
 
            log.debug('Permission to %s granted for user: %s @ %s'
 
                      % (check_scope, user,
 
                         check_location or 'unspecified location'))
 
            return True
 

	
 
        else:
 
            log.debug('Permission to %s denied for user: %s @ %s'
 
                      % (check_scope, user,
 
                         check_location or 'unspecified location'))
 
            return False
 

	
 
    def check_permissions(self):
 
        """Dummy function for overriding"""
 
        raise Exception('You have to write this function in child class')
 

	
 

	
 
class HasPermissionAll(PermsFunction):
 
    def check_permissions(self):
 
        if self.required_perms.issubset(self.user_perms.get('global')):
 
            return True
 
        return False
 

	
 

	
 
class HasPermissionAny(PermsFunction):
 
    def check_permissions(self):
 
        if self.required_perms.intersection(self.user_perms.get('global')):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoPermissionAll(PermsFunction):
 
    def __call__(self, repo_name=None, check_location='', user=None):
 
        self.repo_name = repo_name
 
        return super(HasRepoPermissionAll, self).__call__(check_location, user)
 

	
 
    def check_permissions(self):
 
        if not self.repo_name:
 
            self.repo_name = get_repo_slug(request)
 

	
 
        try:
 
            self._user_perms = set(
 
                [self.user_perms['repositories'][self.repo_name]]
 
            )
 
        except KeyError:
 
            return False
 
        if self.required_perms.issubset(self._user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoPermissionAny(PermsFunction):
 
    def __call__(self, repo_name=None, check_location='', user=None):
 
        self.repo_name = repo_name
 
        return super(HasRepoPermissionAny, self).__call__(check_location, user)
 

	
 
    def check_permissions(self):
 
        if not self.repo_name:
 
            self.repo_name = get_repo_slug(request)
 

	
 
        try:
 
            self._user_perms = set(
 
                [self.user_perms['repositories'][self.repo_name]]
 
            )
 
        except KeyError:
 
            return False
 
        if self.required_perms.intersection(self._user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoGroupPermissionAny(PermsFunction):
 
    def __call__(self, group_name=None, check_location='', user=None):
 
        self.group_name = group_name
 
        return super(HasRepoGroupPermissionAny, self).__call__(check_location, user)
 

	
 
    def check_permissions(self):
 
        try:
 
            self._user_perms = set(
 
                [self.user_perms['repositories_groups'][self.group_name]]
 
            )
 
        except KeyError:
 
            return False
 
        if self.required_perms.intersection(self._user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoGroupPermissionAll(PermsFunction):
 
    def __call__(self, group_name=None, check_location='', user=None):
 
        self.group_name = group_name
 
        return super(HasRepoGroupPermissionAll, self).__call__(check_location, user)
 

	
 
    def check_permissions(self):
 
        try:
 
            self._user_perms = set(
 
                [self.user_perms['repositories_groups'][self.group_name]]
 
            )
 
        except KeyError:
 
            return False
 
        if self.required_perms.issubset(self._user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasUserGroupPermissionAny(PermsFunction):
 
    def __call__(self, user_group_name=None, check_location='', user=None):
 
        self.user_group_name = user_group_name
 
        return super(HasUserGroupPermissionAny, self).__call__(check_location, user)
 

	
 
    def check_permissions(self):
 
        try:
 
            self._user_perms = set(
 
                [self.user_perms['user_groups'][self.user_group_name]]
 
            )
 
        except KeyError:
 
            return False
 
        if self.required_perms.intersection(self._user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasUserGroupPermissionAll(PermsFunction):
 
    def __call__(self, user_group_name=None, check_location='', user=None):
 
        self.user_group_name = user_group_name
 
        return super(HasUserGroupPermissionAll, self).__call__(check_location, user)
 

	
 
    def check_permissions(self):
 
        try:
 
            self._user_perms = set(
 
                [self.user_perms['user_groups'][self.user_group_name]]
 
            )
 
        except KeyError:
 
            return False
 
        if self.required_perms.issubset(self._user_perms):
 
            return True
 
        return False
 

	
 

	
 
#==============================================================================
 
# SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
 
#==============================================================================
 
class HasPermissionAnyMiddleware(object):
 
    def __init__(self, *perms):
 
        self.required_perms = set(perms)
 

	
 
    def __call__(self, user, repo_name):
 
        # repo_name MUST be unicode, since we handle keys in permission
 
        # dict by unicode
 
        repo_name = safe_unicode(repo_name)
 
        usr = AuthUser(user.user_id)
 
        try:
 
            self.user_perms = set([usr.permissions['repositories'][repo_name]])
 
        except Exception:
 
            log.error('Exception while accessing permissions %s' %
 
                      traceback.format_exc())
 
            self.user_perms = set()
 
 
        self.username = user.username
 
        self.repo_name = repo_name
 
        return self.check_permissions()
 

	
 
    def check_permissions(self):
 
        log.debug('checking VCS protocol '
 
                  'permissions %s for user:%s repository:%s', self.user_perms,
 
                                                self.username, self.repo_name)
 
        if self.required_perms.intersection(self.user_perms):
 
            log.debug('Permission to repo: %s granted for user: %s @ %s'
 
                      % (self.repo_name, self.username, 'PermissionMiddleware'))
 
            return True
 
        log.debug('Permission to repo: %s denied for user: %s @ %s'
 
                  % (self.repo_name, self.username, 'PermissionMiddleware'))
 
        return False
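    # Illustrative usage (a sketch mirroring how the VCS middleware performs
    # this check; 'user' is a User instance and the repo name is hypothetical):
    #
    #     checker = HasPermissionAnyMiddleware('repository.read',
    #                                          'repository.write',
    #                                          'repository.admin')
    #     checker(user, 'group/myrepo')   # -> True or False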
 

	
 

	
 
#==============================================================================
 
# SPECIAL VERSION TO HANDLE API AUTH
 
#==============================================================================
 
class _BaseApiPerm(object):
 
    def __init__(self, *perms):
 
        self.required_perms = set(perms)
 

	
 
    def __call__(self, check_location=None, user=None, repo_name=None,
 
                 group_name=None):
 
        cls_name = self.__class__.__name__
 
        check_scope = 'user:%s' % (user)
 
        if repo_name:
 
            check_scope += ', repo:%s' % (repo_name)
 

	
 
        if group_name:
 
            check_scope += ', repo group:%s' % (group_name)
 

	
 
        log.debug('checking cls:%s %s %s @ %s'
 
                  % (cls_name, self.required_perms, check_scope, check_location))
 
        if not user:
 
            log.debug('Empty User passed into arguments')
 
            return False
 

	
 
        ## process user
 
        if not isinstance(user, AuthUser):
 
            user = AuthUser(user.user_id)
 
        if not check_location:
 
            check_location = 'unspecified'
 
        if self.check_permissions(user.permissions, repo_name, group_name):
 
            log.debug('Permission to %s granted for user: %s @ %s'
 
                      % (check_scope, user, check_location))
 
            return True
 

	
 
        else:
 
            log.debug('Permission to %s denied for user: %s @ %s'
 
                      % (check_scope, user, check_location))
 
            return False
 

	
 
    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
 
        """
 
        implement in child class; should return True if permissions are ok,
 
        False otherwise
 

	
 
        :param perm_defs: dict with permission definitions
 
        :param repo_name: repo name
 
        """
 
        raise NotImplementedError()
 

	
 

	
 
class HasPermissionAllApi(_BaseApiPerm):
 
    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
 
        if self.required_perms.issubset(perm_defs.get('global')):
 
            return True
 
        return False
 

	
 

	
 
class HasPermissionAnyApi(_BaseApiPerm):
 
    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
 
        if self.required_perms.intersection(perm_defs.get('global')):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoPermissionAllApi(_BaseApiPerm):
 
    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
 
        try:
 
            _user_perms = set([perm_defs['repositories'][repo_name]])
 
        except KeyError:
 
            log.warning(traceback.format_exc())
 
            return False
 
        if self.required_perms.issubset(_user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoPermissionAnyApi(_BaseApiPerm):
 
    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
 
        try:
 
            _user_perms = set([perm_defs['repositories'][repo_name]])
 
        except KeyError:
 
            log.warning(traceback.format_exc())
 
            return False
 
        if self.required_perms.intersection(_user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
 
    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
 
        try:
 
            _user_perms = set([perm_defs['repositories_groups'][group_name]])
 
        except KeyError:
 
            log.warning(traceback.format_exc())
 
            return False
 
        if self.required_perms.intersection(_user_perms):
 
            return True
 
        return False
 

	
 
class HasRepoGroupPermissionAllApi(_BaseApiPerm):
 
    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
 
        try:
 
            _user_perms = set([perm_defs['repositories_groups'][group_name]])
 
        except KeyError:
 
            log.warning(traceback.format_exc())
 
            return False
 
        if self.required_perms.issubset(_user_perms):
 
            return True
 
        return False
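As an illustration of how these API permission classes are composed and called (apiuser is assumed to be an AuthUser and the repository name is hypothetical):

    can_read = HasRepoPermissionAnyApi('repository.read',
                                       'repository.write',
                                       'repository.admin')
    ok = can_read(user=apiuser, repo_name='mygroup/myrepo',
                  check_location='api_example')  # True or False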
 

	
 
def check_ip_access(source_ip, allowed_ips=None):
 
    """
 
    Checks if source_ip is a subnet of any of allowed_ips.
 

	
 
    :param source_ip:
 
    :param allowed_ips: list of allowed ips together with mask
 
    """
 
    from kallithea.lib import ipaddr
 
    log.debug('checking if ip:%s is subnet of %s' % (source_ip, allowed_ips))
 
    if isinstance(allowed_ips, (tuple, list, set)):
 
        for ip in allowed_ips:
 
-            try:
-                if ipaddr.IPAddress(source_ip) in ipaddr.IPNetwork(ip):
-                    log.debug('IP %s is network %s' %
-                              (ipaddr.IPAddress(source_ip), ipaddr.IPNetwork(ip)))
-                    return True
-                # for any case we cannot determine the IP, don't crash just
-                # skip it and log as error, we want to say forbidden still when
-                # sending bad IP
-            except Exception:
-                log.error(traceback.format_exc())
-                continue
+            if ipaddr.IPAddress(source_ip) in ipaddr.IPNetwork(ip):
+                log.debug('IP %s is network %s' %
+                          (ipaddr.IPAddress(source_ip), ipaddr.IPNetwork(ip)))
+                return True
 
    return False
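For illustration, assuming the bundled ipaddr module follows the usual IPAddress/IPNetwork semantics:

    allowed = ['192.168.0.0/24', '10.0.0.1']
    check_ip_access('192.168.0.42', allowed)   # True, inside the /24 network
    check_ip_access('172.16.0.5', allowed)     # False, matches no entry
    check_ip_access('10.0.0.1', None)          # False, no allow-list given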
kallithea/lib/auth_modules/__init__.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
Authentication modules
 
"""
 

	
 
import logging
 
import traceback
 

	
 
from kallithea import EXTERN_TYPE_INTERNAL
 
from kallithea.lib.compat import importlib
 
from kallithea.lib.utils2 import str2bool
 
from kallithea.lib.compat import formatted_json, hybrid_property
 
from kallithea.lib.auth import PasswordGenerator
 
from kallithea.model.user import UserModel
 
from kallithea.model.db import Setting, User
 
from kallithea.model.meta import Session
 
from kallithea.model.user_group import UserGroupModel
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class LazyFormencode(object):
 
    def __init__(self, formencode_obj, *args, **kwargs):
 
        self.formencode_obj = formencode_obj
 
        self.args = args
 
        self.kwargs = kwargs
 

	
 
    def __call__(self, *args, **kwargs):
 
        from inspect import isfunction
 
        formencode_obj = self.formencode_obj
 
        if isfunction(formencode_obj):
 
            #case we wrap validators into functions
 
            formencode_obj = self.formencode_obj(*args, **kwargs)
 
        return formencode_obj(*self.args, **self.kwargs)
 

	
 

	
 
class KallitheaAuthPluginBase(object):
 
    auth_func_attrs = {
 
        "username": "unique username",
 
        "firstname": "first name",
 
        "lastname": "last name",
 
        "email": "email address",
 
        "groups": '["list", "of", "groups"]',
 
        "extern_name": "name in external source of record",
 
        "extern_type": "type of external source of record",
 
        "admin": 'True|False defines if user should be Kallithea super admin',
 
        "active": 'True|False defines active state of user internally for Kallithea',
 
        "active_from_extern": "True|False\None, active state from the external auth, "
 
                              "None means use definition from Kallithea extern_type active value"
 
    }
 

	
 
    @property
 
    def validators(self):
 
        """
 
        Exposes Kallithea validators modules
 
        """
 
        # this is a hack to overcome issues with pylons threadlocals and
 
        # translator object _() not being registered properly.
 
        class LazyCaller(object):
 
            def __init__(self, name):
 
                self.validator_name = name
 

	
 
            def __call__(self, *args, **kwargs):
 
                from kallithea.model import validators as v
 
                obj = getattr(v, self.validator_name)
 
                #log.debug('Initializing lazy formencode object: %s' % obj)
 
                return LazyFormencode(obj, *args, **kwargs)
 

	
 

	
 
        class ProxyGet(object):
 
            def __getattribute__(self, name):
 
                return LazyCaller(name)
 

	
 
        return ProxyGet()
 

	
 
    @hybrid_property
 
    def name(self):
 
        """
 
        Returns the name of this authentication plugin.
 

	
 
        :returns: string
 
        """
 
        raise NotImplementedError("Not implemented in base class")
 

	
 
    @hybrid_property
 
    def is_container_auth(self):
 
        """
 
        Returns bool if this module uses container auth.
 

	
 
        This property will trigger an automatic call to authenticate on
 
        a visit to the website or during a push/pull.
 

	
 
        :returns: bool
 
        """
 
        return False
 

	
 
    def accepts(self, user, accepts_empty=True):
 
        """
 
        Checks if this authentication module should accept a request for
 
        the current user.
 

	
 
        :param user: user object fetched using plugin's get_user() method.
 
        :param accepts_empty: if True, an empty (None) user is accepted
 
        :returns: boolean
 
        """
 
        plugin_name = self.name
 
        if not user and not accepts_empty:
 
            log.debug('User is empty not allowed to authenticate')
 
            return False
 

	
 
        if user and user.extern_type and user.extern_type != plugin_name:
 
            log.debug('User %s should authenticate using %s this is %s, skipping'
 
                      % (user, user.extern_type, plugin_name))
 

	
 
            return False
 
        return True
 

	
 
    def get_user(self, username=None, **kwargs):
 
        """
 
        Helper method for user fetching in plugins, by default it's using
 
        simple fetch by username, but this method can be customized in plugins
 
        eg. container auth plugin to fetch user by environ params
 

	
 
        :param username: username if given to fetch from database
 
        :param kwargs: extra arguments needed for user fetching.
 
        """
 
        user = None
 
        log.debug('Trying to fetch user `%s` from Kallithea database'
 
                  % (username))
 
        if username:
 
            user = User.get_by_username(username)
 
            if not user:
 
                log.debug('Fallback to fetch user in case insensitive mode')
 
                user = User.get_by_username(username, case_insensitive=True)
 
        else:
 
            log.debug('provided username:`%s` is empty skipping...' % username)
 
        return user
 

	
 
    def settings(self):
 
        """
 
        Return a list of the form:
 
        [
 
            {
 
                "name": "OPTION_NAME",
 
                "type": "[bool|password|string|int|select]",
 
                ["values": ["opt1", "opt2", ...]]
 
                "validator": "expr"
 
                "description": "A short description of the option" [,
 
                "default": Default Value],
 
                ["formname": "Friendly Name for Forms"]
 
            } [, ...]
 
        ]
 

	
 
        This is used to interrogate the authentication plugin as to what
 
        settings it expects to be present and configured.
 

	
 
        'type' is a shorthand notation for what kind of value this option is.
 
        This is primarily used by the auth web form to control how the option
 
        is configured.
 
                bool : checkbox
 
                password : password input box
 
                string : input box
 
                select : single select dropdown
 

	
 
        'validator' is a lazily instantiated form field validator object, a la
 
        formencode. You need to *call* this object to init the validators.
 
        All calls to Kallithea validators should be used through self.validators
 
        which is a lazy loading proxy of formencode module.
 
        """
 
        raise NotImplementedError("Not implemented in base class")
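A hypothetical minimal plugin illustrating the documented format; the option name, description and default below are invented for this sketch (the PAM plugin further down in this changeset shows a real implementation):

    class ExampleAuthPlugin(KallitheaAuthPluginBase):
        @hybrid_property
        def name(self):
            return "example"

        def settings(self):
            # one string option, rendered as a plain input box in the auth form
            return [{
                "name": "service_url",
                "validator": self.validators.UnicodeString(strip=True),
                "type": "string",
                "description": "URL of the external authentication service",
                "default": "https://auth.example.com",
                "formname": "Service URL",
            }]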
 

	
 
    def plugin_settings(self):
 
        """
 
        This method is called by the authentication framework, not the .settings()
 
        method. This method adds a few default settings (e.g., "active"), so that
 
        plugin authors don't have to maintain a bunch of boilerplate.
 

	
 
        OVERRIDING THIS METHOD WILL CAUSE YOUR PLUGIN TO FAIL.
 
        """
 

	
 
        rcsettings = self.settings()
 
        rcsettings.insert(0, {
 
            "name": "enabled",
 
            "validator": self.validators.StringBoolean(if_missing=False),
 
            "type": "bool",
 
            "description": "Enable or Disable this Authentication Plugin",
 
            "formname": "Enabled"
 
            }
 
        )
 
        return rcsettings
 

	
 
    def user_activation_state(self):
 
        """
 
        Defines user activation state when creating new users
 

	
 
        :returns: boolean
 
        """
 
        raise NotImplementedError("Not implemented in base class")
 

	
 
    def auth(self, userobj, username, passwd, settings, **kwargs):
 
        """
 
        Given a user object (which may be null), username, a plaintext password,
 
        and a settings object (containing all the keys needed as listed in settings()),
 
        authenticate this user's login attempt.
 

	
 
        Return None on failure. On success, return a dictionary of the form:
 

	
 
            see: KallitheaAuthPluginBase.auth_func_attrs
 
        This is later validated for correctness
 
        """
 
        raise NotImplementedError("not implemented in base class")
 

	
 
    def _authenticate(self, userobj, username, passwd, settings, **kwargs):
 
        """
 
        Wrapper to call self.auth() that validates call on it
 

	
 
        :param userobj: userobj
 
        :param username: username
 
        :param passwd: plaintext password
 
        :param settings: plugin settings
 
        """
 
        auth = self.auth(userobj, username, passwd, settings, **kwargs)
 
        if auth:
 
            return self._validate_auth_return(auth)
 
        return auth
 

	
 
    def _validate_auth_return(self, ret):
 
        if not isinstance(ret, dict):
 
            raise Exception('returned value from auth must be a dict')
 
        for k in self.auth_func_attrs:
 
            if k not in ret:
 
                raise Exception('Missing %s attribute from returned data' % k)
 
        return ret
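For reference, a dict that passes this validation must contain every key from auth_func_attrs; the values below are purely illustrative:

    example_auth_result = {
        "username": "jdoe",
        "firstname": "John",
        "lastname": "Doe",
        "email": "jdoe@example.com",
        "groups": ["developers"],
        "extern_name": "jdoe",
        "extern_type": "ldap",
        "admin": False,
        "active": True,
        "active_from_extern": None,
    }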
 

	
 

	
 
class KallitheaExternalAuthPlugin(KallitheaAuthPluginBase):
 
    def use_fake_password(self):
 
        """
 
        Return a boolean that indicates whether or not we should set the user's
 
        password to a random value when it is authenticated by this plugin.
 
        If your plugin provides authentication, then you will generally want this.
 

	
 
        :returns: boolean
 
        """
 
        raise NotImplementedError("Not implemented in base class")
 

	
 
    def _authenticate(self, userobj, username, passwd, settings, **kwargs):
 
        auth = super(KallitheaExternalAuthPlugin, self)._authenticate(
 
            userobj, username, passwd, settings, **kwargs)
 
        if auth:
 
            # maybe plugin will clean the username ?
 
            # we should use the return value
 
            username = auth['username']
 
            # if the user is not active in the external source, fail the
            # authentication; this prevents creating users in Kallithea when
            # using external authentication, since an inactive user should not
            # be created anyway
 
            if auth['active_from_extern'] is False:
 
                log.warning("User %s authenticated against %s, but is inactive"
 
                            % (username, self.__module__))
 
                return None
 

	
 
            if self.use_fake_password():
 
                # Randomize the PW because we don't need it, but don't want
 
                # them blank either
 
                passwd = PasswordGenerator().gen_password(length=8)
 

	
 
            log.debug('Updating or creating user info from %s plugin'
 
                      % self.name)
 
            user = UserModel().create_or_update(
 
                username=username,
 
                password=passwd,
 
                email=auth["email"],
 
                firstname=auth["firstname"],
 
                lastname=auth["lastname"],
 
                active=auth["active"],
 
                admin=auth["admin"],
 
                extern_name=auth["extern_name"],
 
                extern_type=self.name
 
            )
 
            Session().flush()
 
            # enforce that the user is only in the given groups; all of them have to be ones
 
            # created from plugins. We store this info in _group_data JSON field
 
-            try:
-                groups = auth['groups'] or []
-                UserGroupModel().enforce_groups(user, groups, self.name)
-            except Exception:
-                # for any reason group syncing fails, we should proceed with login
-                log.error(traceback.format_exc())
+            groups = auth['groups'] or []
+            UserGroupModel().enforce_groups(user, groups, self.name)
 
            Session().commit()
 
        return auth
 

	
 

	
 
def importplugin(plugin):
 
    """
 
    Imports and returns the authentication plugin in the module named by plugin
 
    (e.g., plugin='kallithea.lib.auth_modules.auth_internal'). Returns the
 
    KallitheaAuthPluginBase subclass on success, raises exceptions on failure.
 

	
 
    raises:
 
        AttributeError -- no KallitheaAuthPlugin class in the module
 
    TypeError -- if the KallitheaAuthPlugin is not a subclass of our KallitheaAuthPluginBase
 
        ImportError -- if we couldn't import the plugin at all
 
    """
 
    log.debug("Importing %s" % plugin)
 
    if not plugin.startswith(u'kallithea.lib.auth_modules.auth_'):
 
        parts = plugin.split(u'.lib.auth_modules.auth_', 1)
 
        if len(parts) == 2:
 
            _module, pn = parts
 
            if pn == EXTERN_TYPE_INTERNAL:
 
                pn = "internal"
 
            plugin = u'kallithea.lib.auth_modules.auth_' + pn
 
    PLUGIN_CLASS_NAME = "KallitheaAuthPlugin"
 
    try:
 
        module = importlib.import_module(plugin)
 
    except (ImportError, TypeError):
 
        log.error(traceback.format_exc())
 
        # TODO: make this more robust: if by some accident we screw up
        # the plugin name, the crash is pretty bad and hard to recover from
 
        raise
 

	
 
    log.debug("Loaded auth plugin from %s (module:%s, file:%s)"
 
              % (plugin, module.__name__, module.__file__))
 

	
 
    pluginclass = getattr(module, PLUGIN_CLASS_NAME)
 
    if not issubclass(pluginclass, KallitheaAuthPluginBase):
 
        raise TypeError("Authentication class %s.KallitheaAuthPlugin is not "
 
                        "a subclass of %s" % (plugin, KallitheaAuthPluginBase))
 
    return pluginclass
 

	
 

	
 
def loadplugin(plugin):
 
    """
 
    Loads and returns an instantiated authentication plugin.
 

	
 
        see: importplugin
 
    """
 
    plugin = importplugin(plugin)()
 
    if plugin.plugin_settings.im_func != KallitheaAuthPluginBase.plugin_settings.im_func:
 
        raise TypeError("Authentication class %s.KallitheaAuthPluginBase "
 
                        "has overriden the plugin_settings method, which is "
 
                        "forbidden." % plugin)
 
    return plugin
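A usage sketch: the module path names the bundled internal auth plugin, and the first entry of plugin_settings() is the automatically prepended "enabled" option:

    plugin = loadplugin('kallithea.lib.auth_modules.auth_internal')
    settings = plugin.plugin_settings()
    assert settings[0]['name'] == 'enabled'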
 

	
 

	
 
def authenticate(username, password, environ=None):
 
    """
 
    Authentication function used for access control,
 
    It tries to authenticate based on enabled authentication modules.
 

	
 
    :param username: username can be empty for container auth
 
    :param password: password can be empty for container auth
 
    :param environ: environ headers passed for container auth
 
    :returns: None if auth failed, plugin_user dict if auth is correct
 
    """
 

	
 
    auth_plugins = Setting.get_auth_plugins()
 
    log.debug('Authentication against %s plugins' % (auth_plugins,))
 
    for module in auth_plugins:
 
        try:
 
            plugin = loadplugin(module)
 
        except (ImportError, AttributeError, TypeError), e:
 
            raise ImportError('Failed to load authentication module %s : %s'
 
                              % (module, str(e)))
 
        log.debug('Trying authentication using ** %s **' % (module,))
 
        # load plugin settings from Kallithea database
 
        plugin_name = plugin.name
 
        plugin_settings = {}
 
        for v in plugin.plugin_settings():
 
            conf_key = "auth_%s_%s" % (plugin_name, v["name"])
 
            setting = Setting.get_by_name(conf_key)
 
            plugin_settings[v["name"]] = setting.app_settings_value if setting else None
 
        log.debug('Plugin settings \n%s' % formatted_json(plugin_settings))
 

	
 
        if not str2bool(plugin_settings["enabled"]):
 
            log.info("Authentication plugin %s is disabled, skipping for %s"
 
                     % (module, username))
 
            continue
 

	
 
        # use plugin's method of user extraction.
 
        user = plugin.get_user(username, environ=environ,
 
                               settings=plugin_settings)
 
        log.debug('Plugin %s extracted user is `%s`' % (module, user))
 
        if not plugin.accepts(user):
 
            log.debug('Plugin %s does not accept user `%s` for authentication'
 
                      % (module, user))
 
            continue
 
        else:
 
            log.debug('Plugin %s accepted user `%s` for authentication'
 
                      % (module, user))
 

	
 
        log.info('Authenticating user using %s plugin' % plugin.__module__)
 
        # _authenticate is a wrapper for .auth() method of plugin.
 
        # it checks if .auth() sends proper data. for KallitheaExternalAuthPlugin
 
        # it also maps users to Database and maps the attributes returned
 
        # from .auth() to Kallithea database. If this function returns data
 
        # then auth is correct.
 
        plugin_user = plugin._authenticate(user, username, password,
 
                                           plugin_settings,
 
                                           environ=environ or {})
 
        log.debug('PLUGIN USER DATA: %s' % plugin_user)
 

	
 
        if plugin_user:
 
            log.debug('Plugin returned proper authentication data')
 
            return plugin_user
 

	
 
        # we failed to authenticate because the .auth() method didn't return a proper user
 
        if username:
 
            log.warning("User `%s` failed to authenticate against %s"
 
                        % (username, plugin.__module__))
 
    return None
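A sketch of the two typical call patterns described in the docstring; environ here stands for the WSGI environ of the current request:

    auth_info = authenticate('jdoe', 'secret')               # form login
    container_info = authenticate('', '', environ=environ)   # container auth
    if auth_info is not None:
        username = auth_info['username']   # dict follows auth_func_attrs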
kallithea/lib/auth_modules/auth_pam.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.auth_pam
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Kallithea authentication library for PAM
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Created on Apr 09, 2013
 
:author: Alexey Larikov
 
"""
 

	
 
import logging
 
import time
 
import pam
 
import pwd
 
import grp
 
import re
 
import socket
 
import threading
 

	
 
from kallithea.lib import auth_modules
 
from kallithea.lib.compat import formatted_json, hybrid_property
 

	
 
log = logging.getLogger(__name__)
 

	
 
# Cache to store PAM authenticated users
 
_auth_cache = dict()
 
_pam_lock = threading.Lock()
 

	
 

	
 
class KallitheaAuthPlugin(auth_modules.KallitheaExternalAuthPlugin):
 
    # PAM authentication can be slow. Repository operations involve a lot of
    # auth calls. A little caching helps speed up push/pull operations significantly
 
    AUTH_CACHE_TTL = 4
 

	
 
    def __init__(self):
 
        global _auth_cache
 
        ts = time.time()
 
        cleared_cache = dict(
 
            [(k, v) for (k, v) in _auth_cache.items() if
 
             (v + KallitheaAuthPlugin.AUTH_CACHE_TTL > ts)])
 
        _auth_cache = cleared_cache
 

	
 
    @hybrid_property
 
    def name(self):
 
        return "pam"
 

	
 
    def settings(self):
 
        settings = [
 
            {
 
                "name": "service",
 
                "validator": self.validators.UnicodeString(strip=True),
 
                "type": "string",
 
                "description": "PAM service name to use for authentication",
 
                "default": "login",
 
                "formname": "PAM service name"
 
            },
 
            {
 
                "name": "gecos",
 
                "validator": self.validators.UnicodeString(strip=True),
 
                "type": "string",
 
                "description": "Regex for extracting user name/email etc "
 
                               "from Unix userinfo",
 
                "default": "(?P<last_name>.+),\s*(?P<first_name>\w+)",
 
                "formname": "Gecos Regex"
 
            }
 
        ]
 
        return settings
 

	
 
    def use_fake_password(self):
 
        return True
 

	
 
    def auth(self, userobj, username, password, settings, **kwargs):
 
        if username not in _auth_cache:
 
            # Need lock here, as PAM authentication is not thread safe
 
            _pam_lock.acquire()
 
            try:
 
                auth_result = pam.authenticate(username, password,
 
                                               settings["service"])
 
                # cache result only if we properly authenticated
 
                if auth_result:
 
                    _auth_cache[username] = time.time()
 
            finally:
 
                _pam_lock.release()
 

	
 
            if not auth_result:
 
                log.error("PAM was unable to authenticate user: %s" % (username,))
 
                return None
 
        else:
 
            log.debug("Using cached auth for user: %s" % (username,))
 

	
 
        # old attrs fetched from Kallithea database
 
        admin = getattr(userobj, 'admin', False)
 
        active = getattr(userobj, 'active', True)
 
        email = getattr(userobj, 'email', '') or "%s@%s" % (username, socket.gethostname())
 
        firstname = getattr(userobj, 'firstname', '')
 
        lastname = getattr(userobj, 'lastname', '')
 
        extern_type = getattr(userobj, 'extern_type', '')
 

	
 
        user_attrs = {
 
            'username': username,
 
            'firstname': firstname,
 
            'lastname': lastname,
 
            'groups': [g.gr_name for g in grp.getgrall() if username in g.gr_mem],
 
            'email': email,
 
            'admin': admin,
 
            'active': active,
 
            "active_from_extern": None,
 
            'extern_name': username,
 
            'extern_type': extern_type,
 
        }
 

	
 
        try:
 
            user_data = pwd.getpwnam(username)
 
            regex = settings["gecos"]
 
            match = re.search(regex, user_data.pw_gecos)
 
            if match:
 
                user_attrs["firstname"] = match.group('first_name')
 
                user_attrs["lastname"] = match.group('last_name')
 
        except Exception:
-            log.warning("Cannot extract additional info for PAM user")
+            log.warning("Cannot extract additional info for PAM user %s", username)
            pass
 

	
 
        log.debug("pamuser: \n%s" % formatted_json(user_attrs))
 
        log.info('user %s authenticated correctly' % user_attrs['username'])
 
        return user_attrs
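How the default gecos regex from settings() splits a typical GECOS field; the sample value is invented:

    import re
    gecos_re = "(?P<last_name>.+),\s*(?P<first_name>\w+)"
    match = re.search(gecos_re, "Doe, John")
    match.group('first_name'), match.group('last_name')   # ('John', 'Doe')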
kallithea/lib/celerylib/tasks.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.celerylib.tasks
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Kallithea task modules, containing all tasks that are supposed to be run
by the celery daemon
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Oct 6, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from celery.decorators import task
 

	
 
import os
 
import traceback
 
import logging
 
from os.path import join as jn
 

	
 
from time import mktime
 
from operator import itemgetter
 
from string import lower
 

	
 
from pylons import config
 

	
 
from kallithea import CELERY_ON
 
from kallithea.lib.celerylib import run_task, locked_task, dbsession, \
 
    str2bool, __get_lockkey, LockHeld, DaemonLock, get_session
 
from kallithea.lib.helpers import person
 
from kallithea.lib.rcmail.smtp_mailer import SmtpMailer
 
from kallithea.lib.utils import add_cache, action_logger
 
from kallithea.lib.compat import json, OrderedDict
 
from kallithea.lib.hooks import log_create_repository
 

	
 
from kallithea.model.db import Statistics, Repository, User
 

	
 

	
 
add_cache(config)  # pragma: no cover
 

	
 
__all__ = ['whoosh_index', 'get_commits_stats', 'send_email']
 

	
 

	
 
def get_logger(cls):
    if CELERY_ON:
        try:
-            log = cls.get_logger()
-        except Exception:
-            log = logging.getLogger(__name__)
-    else:
-        log = logging.getLogger(__name__)
-
-    return log
+            return cls.get_logger()
+        except AttributeError:
+            pass
+    return logging.getLogger(__name__)
 

	
 

	
 
@task(ignore_result=True)
 
@locked_task
 
@dbsession
 
def whoosh_index(repo_location, full_index):
 
    from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
 
    DBS = get_session()
 

	
 
    index_location = config['index_dir']
 
    WhooshIndexingDaemon(index_location=index_location,
 
                         repo_location=repo_location, sa=DBS)\
 
                         .run(full_index=full_index)
 

	
 

	
 
@task(ignore_result=True)
 
@dbsession
 
def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
 
    log = get_logger(get_commits_stats)
 
    DBS = get_session()
 
    lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
 
                            ts_max_y)
 
    lockkey_path = config['app_conf']['cache_dir']
 

	
 
    log.info('running task with lockkey %s' % lockkey)
 

	
 
    try:
 
        lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))
 

	
 
        # for js data compatibility, strip double quotes from the person key
 
        akc = lambda k: person(k).replace('"', "")
 

	
 
        co_day_auth_aggr = {}
 
        commits_by_day_aggregate = {}
 
        repo = Repository.get_by_repo_name(repo_name)
 
        if repo is None:
 
            return True
 

	
 
        repo = repo.scm_instance
 
        repo_size = repo.count()
 
        # return if the repo has no revisions
 
        if repo_size < 1:
 
            lock.release()
 
            return True
 

	
 
        skip_date_limit = True
 
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
 
        last_rev = None
 
        last_cs = None
 
        timegetter = itemgetter('time')
 

	
 
        dbrepo = DBS.query(Repository)\
 
            .filter(Repository.repo_name == repo_name).scalar()
 
        cur_stats = DBS.query(Statistics)\
 
            .filter(Statistics.repository == dbrepo).scalar()
 

	
 
        if cur_stats is not None:
 
            last_rev = cur_stats.stat_on_revision
 

	
 
        if last_rev == repo.get_changeset().revision and repo_size > 1:
 
            # pass silently without any work if we're not on first revision or
 
            # current state of parsing revision(from db marker) is the
 
            # last revision
 
            lock.release()
 
            return True
 

	
 
        if cur_stats:
 
            commits_by_day_aggregate = OrderedDict(json.loads(
 
                                        cur_stats.commit_activity_combined))
 
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)
 

	
 
        log.debug('starting parsing %s' % parse_limit)
 
        lmktime = mktime
 

	
 
        last_rev = last_rev + 1 if last_rev >= 0 else 0
 
        log.debug('Getting revisions from %s to %s' % (
 
             last_rev, last_rev + parse_limit)
 
        )
 
        for cs in repo[last_rev:last_rev + parse_limit]:
 
            log.debug('parsing %s' % cs)
 
            last_cs = cs  # remember last parsed changeset
 
            k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
 
                          cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])
 

	
 
            if akc(cs.author) in co_day_auth_aggr:
 
                try:
 
                    l = [timegetter(x) for x in
 
                         co_day_auth_aggr[akc(cs.author)]['data']]
 
                    time_pos = l.index(k)
 
                except ValueError:
 
                    time_pos = None
 

	
 
                if time_pos >= 0 and time_pos is not None:
 

	
 
                    datadict = \
 
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]
 

	
 
                    datadict["commits"] += 1
 
                    datadict["added"] += len(cs.added)
 
                    datadict["changed"] += len(cs.changed)
 
                    datadict["removed"] += len(cs.removed)
 

	
 
                else:
 
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
 

	
 
                        datadict = {"time": k,
 
                                    "commits": 1,
 
                                    "added": len(cs.added),
 
                                    "changed": len(cs.changed),
 
                                    "removed": len(cs.removed),
 
                                   }
 
                        co_day_auth_aggr[akc(cs.author)]['data']\
 
                            .append(datadict)
 

	
 
            else:
 
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
 
                    co_day_auth_aggr[akc(cs.author)] = {
 
                                        "label": akc(cs.author),
 
                                        "data": [{"time":k,
 
                                                 "commits":1,
 
                                                 "added":len(cs.added),
 
                                                 "changed":len(cs.changed),
 
                                                 "removed":len(cs.removed),
 
                                                 }],
 
                                        "schema": ["commits"],
 
                                        }
 

	
 
            #gather all data by day
 
            if k in commits_by_day_aggregate:
 
                commits_by_day_aggregate[k] += 1
 
            else:
 
                commits_by_day_aggregate[k] = 1
 

	
 
        overview_data = sorted(commits_by_day_aggregate.items(),
 
                               key=itemgetter(0))
 

	
 
        if not co_day_auth_aggr:
 
            co_day_auth_aggr[akc(repo.contact)] = {
 
                "label": akc(repo.contact),
 
                "data": [0, 1],
 
                "schema": ["commits"],
 
            }
 

	
 
        stats = cur_stats if cur_stats else Statistics()
 
        stats.commit_activity = json.dumps(co_day_auth_aggr)
 
        stats.commit_activity_combined = json.dumps(overview_data)
 

	
 
        log.debug('last revision %s' % last_rev)
 
        leftovers = len(repo.revisions[last_rev:])
 
        log.debug('revisions to parse %s' % leftovers)
 

	
 
        if last_rev == 0 or leftovers < parse_limit:
 
            log.debug('getting code trending stats')
 
            stats.languages = json.dumps(__get_codes_stats(repo_name))
 

	
 
        try:
 
            stats.repository = dbrepo
 
            stats.stat_on_revision = last_cs.revision if last_cs else 0
 
            DBS.add(stats)
 
            DBS.commit()
 
        except:
 
            log.error(traceback.format_exc())
 
            DBS.rollback()
 
            lock.release()
 
            return False
 

	
 
        # final release
 
        lock.release()
 

	
 
        # execute another task if celery is enabled
 
        if len(repo.revisions) > 1 and CELERY_ON and recurse_limit > 0:
 
            recurse_limit -= 1
 
            run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y,
 
                     recurse_limit)
 
        if recurse_limit <= 0:
 
            log.debug('Breaking recursive mode due to reach of recurse limit')
 
        return True
 
    except LockHeld:
 
        log.info('LockHeld')
 
        return 'Task with key %s already running' % lockkey
 

	
 

	
 
@task(ignore_result=True)
 
@dbsession
 
def send_email(recipients, subject, body='', html_body='', headers=None):
 
    """
 
    Sends an email with defined parameters from the .ini files.
 

	
 
    :param recipients: list of recipients, if this is None, the defined email
 
        address from field 'email_to' and all admins is used instead
 
    :param subject: subject of the mail
 
    :param body: body of the mail
 
    :param html_body: html version of body
 
    """
 
    log = get_logger(send_email)
 
    assert isinstance(recipients, list), recipients
 

	
 
    email_config = config
 
    email_prefix = email_config.get('email_prefix', '')
 
    if email_prefix:
 
        subject = "%s %s" % (email_prefix, subject)
 
    if recipients is None:
 
        # if recipients are not defined we send to email_config + all admins
 
        admins = [u.email for u in User.query()
 
                  .filter(User.admin == True).all()]
 
        recipients = [email_config.get('email_to')] + admins
 
        log.warning("recipients not specified for '%s' - sending to admins %s", subject, ' '.join(recipients))
 
    elif not recipients:
 
        log.error("No recipients specified")
 
        return False
 

	
 
    mail_from = email_config.get('app_email_from', 'Kallithea')
 
    user = email_config.get('smtp_username')
 
    passwd = email_config.get('smtp_password')
 
    mail_server = email_config.get('smtp_server')
 
    mail_port = email_config.get('smtp_port')
 
    tls = str2bool(email_config.get('smtp_use_tls'))
 
    ssl = str2bool(email_config.get('smtp_use_ssl'))
 
    debug = str2bool(email_config.get('debug'))
 
    smtp_auth = email_config.get('smtp_auth')
 

	
 
    if not mail_server:
 
        log.error("SMTP mail server not configured - cannot send mail '%s' to %s", subject, ' '.join(recipients))
 
        log.warning("body:\n%s", body)
 
        log.warning("html:\n%s", html_body)
 
        return False
 

	
 
    try:
 
        m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth,
 
                       mail_port, ssl, tls, debug=debug)
 
        m.send(recipients, subject, body, html_body, headers=headers)
 
    except:
 
        log.error('Mail sending failed')
 
        log.error(traceback.format_exc())
 
        return False
 
    return True
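Callers dispatch this through run_task (imported at the top of this module), which runs the task via celery when enabled and synchronously otherwise; the recipient address and texts below are placeholders:

    run_task(send_email,
             ['user@example.com'],
             'Test subject',
             'plain text body',
             '<b>html body</b>')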
 

	
 
@task(ignore_result=False)
 
@dbsession
 
def create_repo(form_data, cur_user):
 
    from kallithea.model.repo import RepoModel
 
    from kallithea.model.user import UserModel
 
    from kallithea.model.db import Setting
 

	
 
    log = get_logger(create_repo)
 
    DBS = get_session()
 

	
 
    cur_user = UserModel(DBS)._get_user(cur_user)
 

	
 
    owner = cur_user
 
    repo_name = form_data['repo_name']
 
    repo_name_full = form_data['repo_name_full']
 
    repo_type = form_data['repo_type']
 
    description = form_data['repo_description']
 
    private = form_data['repo_private']
 
    clone_uri = form_data.get('clone_uri')
 
    repo_group = form_data['repo_group']
 
    landing_rev = form_data['repo_landing_rev']
 
    copy_fork_permissions = form_data.get('copy_permissions')
 
    copy_group_permissions = form_data.get('repo_copy_permissions')
 
    fork_of = form_data.get('fork_parent_id')
 
    state = form_data.get('repo_state', Repository.STATE_PENDING)
 

	
 
    # repo creation defaults, private and repo_type are filled in form
 
    defs = Setting.get_default_repo_settings(strip_prefix=True)
 
    enable_statistics = defs.get('repo_enable_statistics')
 
    enable_locking = defs.get('repo_enable_locking')
 
    enable_downloads = defs.get('repo_enable_downloads')
 

	
 
    try:
 
        repo = RepoModel(DBS)._create_repo(
 
            repo_name=repo_name_full,
 
            repo_type=repo_type,
 
            description=description,
 
            owner=owner,
 
            private=private,
 
            clone_uri=clone_uri,
 
            repo_group=repo_group,
 
            landing_rev=landing_rev,
 
            fork_of=fork_of,
 
            copy_fork_permissions=copy_fork_permissions,
 
            copy_group_permissions=copy_group_permissions,
 
            enable_statistics=enable_statistics,
 
            enable_locking=enable_locking,
 
            enable_downloads=enable_downloads,
 
            state=state
 
        )
 

	
 
        action_logger(cur_user, 'user_created_repo',
 
                      form_data['repo_name_full'], '', DBS)
 

	
 
        DBS.commit()
 
        # now create this repo on Filesystem
 
        RepoModel(DBS)._create_filesystem_repo(
 
            repo_name=repo_name,
 
            repo_type=repo_type,
 
            repo_group=RepoModel(DBS)._get_repo_group(repo_group),
 
            clone_uri=clone_uri,
 
        )
 
        repo = Repository.get_by_repo_name(repo_name_full)
 
        log_create_repository(repo.get_dict(), created_by=owner.username)
 

	
 
        # update repo changeset caches initially
 
        repo.update_changeset_cache()
 

	
 
        # set new created state
 
        repo.set_state(Repository.STATE_CREATED)
 
        DBS.commit()
 
    except Exception, e:
 
        log.warning('Exception %s occurred when creating repository, '
                    'doing cleanup...' % e)
 
        # rollback things manually !
 
        repo = Repository.get_by_repo_name(repo_name_full)
 
        if repo:
 
            Repository.delete(repo.repo_id)
 
            DBS.commit()
 
            RepoModel(DBS)._delete_filesystem_repo(repo)
 
        raise
 

	
 
    # it's an odd fix to make celery fail task when exception occurs
 
    def on_failure(self, *args, **kwargs):
 
        pass
 

	
 
    return True
 

	
 

	
 
@task(ignore_result=False)
 
@dbsession
 
def create_repo_fork(form_data, cur_user):
 
    """
 
    Creates a fork of a repository using internal VCS methods
 

	
 
    :param form_data:
 
    :param cur_user:
 
    """
 
    from kallithea.model.repo import RepoModel
 
    from kallithea.model.user import UserModel
 

	
 
    log = get_logger(create_repo_fork)
 
    DBS = get_session()
 

	
 
    base_path = Repository.base_path()
 
    cur_user = UserModel(DBS)._get_user(cur_user)
 

	
 
    repo_name = form_data['repo_name']  # fork in this case
 
    repo_name_full = form_data['repo_name_full']
 

	
 
    repo_type = form_data['repo_type']
 
    owner = cur_user
 
    private = form_data['private']
 
    clone_uri = form_data.get('clone_uri')
 
    repo_group = form_data['repo_group']
 
    landing_rev = form_data['landing_rev']
 
    copy_fork_permissions = form_data.get('copy_permissions')
 

	
 
    try:
 
        fork_of = RepoModel(DBS)._get_repo(form_data.get('fork_parent_id'))
 

	
 
        RepoModel(DBS)._create_repo(
 
            repo_name=repo_name_full,
 
            repo_type=repo_type,
 
            description=form_data['description'],
 
            owner=owner,
 
            private=private,
 
            clone_uri=clone_uri,
 
            repo_group=repo_group,
 
            landing_rev=landing_rev,
 
            fork_of=fork_of,
 
            copy_fork_permissions=copy_fork_permissions
 
        )
 
        action_logger(cur_user, 'user_forked_repo:%s' % repo_name_full,
 
                      fork_of.repo_name, '', DBS)
 
        DBS.commit()
 

	
 
        update_after_clone = form_data['update_after_clone'] # FIXME - unused!
 
        source_repo_path = os.path.join(base_path, fork_of.repo_name)
 

	
 
        # now create this repo on Filesystem
 
        RepoModel(DBS)._create_filesystem_repo(
 
            repo_name=repo_name,
 
            repo_type=repo_type,
 
            repo_group=RepoModel(DBS)._get_repo_group(repo_group),
 
            clone_uri=source_repo_path,
 
        )
 
        repo = Repository.get_by_repo_name(repo_name_full)
 
        log_create_repository(repo.get_dict(), created_by=owner.username)
 

	
 
        # update repo changeset caches initially
 
        repo.update_changeset_cache()
 

	
 
        # set new created state
 
        repo.set_state(Repository.STATE_CREATED)
 
        DBS.commit()
 
    except Exception, e:
 
        log.warning('Exception %s occurred when forking repository, '
 
                    'doing cleanup...' % e)
 
        #rollback things manually !
 
        repo = Repository.get_by_repo_name(repo_name_full)
 
        if repo:
 
            Repository.delete(repo.repo_id)
 
            DBS.commit()
 
            RepoModel(DBS)._delete_filesystem_repo(repo)
 
        raise
 

	
 
    # it's an odd fix to make celery fail task when exception occurs
 
    def on_failure(self, *args, **kwargs):
 
        pass
 

	
 
    return True
 

	
 

	
 
def __get_codes_stats(repo_name):
 
    from kallithea.config.conf import LANGUAGES_EXTENSIONS_MAP
 
    repo = Repository.get_by_repo_name(repo_name).scm_instance
 

	
 
    tip = repo.get_changeset()
 
    code_stats = {}
 

	
 
    def aggregate(cs):
 
        for f in cs[2]:
 
            ext = lower(f.extension)
 
            if ext in LANGUAGES_EXTENSIONS_MAP.keys() and not f.is_binary:
 
                if ext in code_stats:
 
                    code_stats[ext] += 1
 
                else:
 
                    code_stats[ext] = 1
 

	
 
    map(aggregate, tip.walk('/'))
 

	
 
    return code_stats or {}
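The result maps file extensions found in the tip changeset to file counts; the repository name and numbers below are illustrative:

    code_stats = __get_codes_stats('mygroup/myrepo')   # e.g. {'py': 12, 'js': 3}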
kallithea/lib/db_manage.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.db_manage
 
~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Database creation, and setup module for Kallithea. Used for creation
 
of database as well as for migration operations
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Apr 10, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import os
 
import sys
 
import time
 
import uuid
 
import logging
 
from os.path import dirname as dn, join as jn
 

	
 
from kallithea import __dbversion__, __py_version__, EXTERN_TYPE_INTERNAL, DB_MIGRATIONS
 
from kallithea.model.user import UserModel
 
from kallithea.lib.utils import ask_ok
 
from kallithea.model import init_model
 
from kallithea.model.db import User, Permission, Ui, \
 
    Setting, UserToPerm, DbMigrateVersion, RepoGroup, \
 
    UserRepoGroupToPerm, CacheInvalidation, UserGroup, Repository
 

	
 
from sqlalchemy.engine import create_engine
 
from kallithea.model.repo_group import RepoGroupModel
 
#from kallithea.model import meta
 
from kallithea.model.meta import Session, Base
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.permission import PermissionModel
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def notify(msg):
 
    """
 
    Notification for migration messages
 
    """
 
    ml = len(msg) + (4 * 2)
 
    print('\n%s\n*** %s ***\n%s' % ('*' * ml, msg, '*' * ml)).upper()
 

	
 

	
 
class DbManage(object):
 
    def __init__(self, log_sql, dbconf, root, tests=False, SESSION=None, cli_args={}):
 
        self.dbname = dbconf.split('/')[-1]
 
        self.tests = tests
 
        self.root = root
 
        self.dburi = dbconf
 
        self.log_sql = log_sql
 
        self.db_exists = False
 
        self.cli_args = cli_args
 
        self.init_db(SESSION=SESSION)
 

	
 
        force_ask = self.cli_args.get('force_ask')
 
        if force_ask is not None:
 
            global ask_ok
 
            ask_ok = lambda *args, **kwargs: force_ask
 

	
 
    def init_db(self, SESSION=None):
 
        if SESSION:
 
            self.sa = SESSION
 
        else:
 
            #init new sessions
 
            engine = create_engine(self.dburi, echo=self.log_sql)
 
            init_model(engine)
 
            self.sa = Session()
 

	
 
    def create_tables(self, override=False):
 
        """
 
        Create an auth database
 
        """
 

	
 
        log.info("Any existing database is going to be destroyed")
 
        if self.tests:
 
            destroy = True
 
        else:
 
            destroy = ask_ok('Are you sure you want to destroy the old database? [y/n]')
 
        if not destroy:
 
            print 'Nothing done.'
 
            sys.exit(0)
 
        if destroy:
 
            Base.metadata.drop_all()
 

	
 
        checkfirst = not override
 
        Base.metadata.create_all(checkfirst=checkfirst)
 
        log.info('Created tables for %s' % self.dbname)
 

	
 
    def set_db_version(self):
 
        ver = DbMigrateVersion()
 
        ver.version = __dbversion__
 
        ver.repository_id = DB_MIGRATIONS
 
        ver.repository_path = 'versions'
 
        self.sa.add(ver)
 
        log.info('db version set to: %s' % __dbversion__)
 

	
 
    def upgrade(self):
 
        """
 
        Upgrades the given database schema to the given revision, following
        all steps needed to perform the upgrade
 

	
 
        """
 

	
 
        from kallithea.lib.dbmigrate.migrate.versioning import api
 
        from kallithea.lib.dbmigrate.migrate.exceptions import \
 
            DatabaseNotControlledError
 

	
 
        if 'sqlite' in self.dburi:
 
            print (
 
               '********************** WARNING **********************\n'
 
               'Make sure your version of sqlite is at least 3.7.X.  \n'
 
               'Earlier versions are known to fail on some migrations\n'
 
               '*****************************************************\n')
 

	
 
        upgrade = ask_ok('You are about to perform a database upgrade. Make '
                         'sure you have backed up your database. '
                         'Continue? [y/n]')
 
        if not upgrade:
 
            print 'No upgrade performed'
 
            sys.exit(0)
 

	
 
        repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))),
 
                             'kallithea/lib/dbmigrate')
 
        db_uri = self.dburi
 

	
 
        try:
 
            curr_version = api.db_version(db_uri, repository_path)
 
            msg = ('Found current database under version '
 
                   'control with version %s' % curr_version)
 

	
 
        except (RuntimeError, DatabaseNotControlledError):
 
            curr_version = 1
 
            msg = ('Current database is not under version control. Setting '
 
                   'as version %s' % curr_version)
 
            api.version_control(db_uri, repository_path, curr_version)
 

	
 
        notify(msg)
 
        if curr_version == __dbversion__:
 
            print 'This database is already at the newest version'
 
            sys.exit(0)
 

	
 
        # clear cache keys
 
        log.info("Clearing cache keys now...")
 
        CacheInvalidation.clear_cache()
 

	
 
        upgrade_steps = range(curr_version + 1, __dbversion__ + 1)
 
        notify('attempting to do database upgrade from '
 
               'version %s to version %s' % (curr_version, __dbversion__))
 

	
 
        # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE
 
        _step = None
 
        for step in upgrade_steps:
 
            notify('performing upgrade step %s' % step)
 
            time.sleep(0.5)
 

	
 
            api.upgrade(db_uri, repository_path, step)
 
            notify('schema upgrade for step %s completed' % (step,))
 

	
 
            _step = step
 

	
 
        notify('upgrade to version %s successful' % _step)
 

	
 
    def fix_repo_paths(self):
 
        """
 
        Fixes an old Kallithea version path into a new one without a '*'
 
        """
 

	
 
        paths = self.sa.query(Ui)\
 
                .filter(Ui.ui_key == '/')\
 
                .scalar()
 

	
 
        paths.ui_value = paths.ui_value.replace('*', '')
 

	
 
-        try:
-            self.sa.add(paths)
-            self.sa.commit()
-        except Exception:
-            self.sa.rollback()
-            raise
+        self.sa.add(paths)
+        self.sa.commit()
 

	
 
    def fix_default_user(self):
 
        """
 
        Fixes an old default user with some 'nicer' default values,
 
        used mostly for anonymous access
 
        """
 
        def_user = self.sa.query(User)\
 
                .filter(User.username == User.DEFAULT_USER)\
 
                .one()
 

	
 
        def_user.name = 'Anonymous'
 
        def_user.lastname = 'User'
 
        def_user.email = 'anonymous@kallithea-scm.org'
 

	
 
-        try:
-            self.sa.add(def_user)
-            self.sa.commit()
-        except Exception:
-            self.sa.rollback()
-            raise
+        self.sa.add(def_user)
+        self.sa.commit()
 

	
 
    def fix_settings(self):
 
        """
 
        Fixes Kallithea settings by adding the ga_code key for Google Analytics
 
        """
 

	
 
        hgsettings3 = Setting('ga_code', '')
 

	
 
-        try:
-            self.sa.add(hgsettings3)
-            self.sa.commit()
-        except Exception:
-            self.sa.rollback()
-            raise
+        self.sa.add(hgsettings3)
+        self.sa.commit()
 

	
 
    def admin_prompt(self, second=False):
 
        if not self.tests:
 
            import getpass
 

	
 
            # defaults
 
            defaults = self.cli_args
 
            username = defaults.get('username')
 
            password = defaults.get('password')
 
            email = defaults.get('email')
 

	
 
            def get_password():
 
                password = getpass.getpass('Specify admin password '
 
                                           '(min 6 chars):')
 
                confirm = getpass.getpass('Confirm password:')
 

	
 
                if password != confirm:
 
                    log.error('passwords mismatch')
 
                    return False
 
                if len(password) < 6:
 
                    log.error('password is too short, use at least 6 characters')
 
                    return False
 

	
 
                return password
 
            if username is None:
 
                username = raw_input('Specify admin username:')
 
            if password is None:
 
                password = get_password()
 
                if not password:
 
                    #second try
 
                    password = get_password()
 
                    if not password:
 
                        sys.exit()
 
            if email is None:
 
                email = raw_input('Specify admin email:')
 
            self.create_user(username, password, email, True)
 
        else:
 
            log.info('creating admin and regular test users')
 
            from kallithea.tests import TEST_USER_ADMIN_LOGIN, \
 
            TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \
 
            TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \
 
            TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \
 
            TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL
 

	
 
            self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,
 
                             TEST_USER_ADMIN_EMAIL, True)
 

	
 
            self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
 
                             TEST_USER_REGULAR_EMAIL, False)
 

	
 
            self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS,
 
                             TEST_USER_REGULAR2_EMAIL, False)
 

	
 
    def create_ui_settings(self, repo_store_path):
 
        """
 
        Creates ui settings, fills out hooks
 
        """
 

	
 
        #HOOKS
 
        hooks1_key = Ui.HOOK_UPDATE
 
        hooks1_ = self.sa.query(Ui)\
 
            .filter(Ui.ui_key == hooks1_key).scalar()
 

	
 
        hooks1 = Ui() if hooks1_ is None else hooks1_
 
        hooks1.ui_section = 'hooks'
 
        hooks1.ui_key = hooks1_key
 
        hooks1.ui_value = 'hg update >&2'
 
        hooks1.ui_active = False
 
        self.sa.add(hooks1)
 

	
 
        hooks2_key = Ui.HOOK_REPO_SIZE
 
        hooks2_ = self.sa.query(Ui)\
 
            .filter(Ui.ui_key == hooks2_key).scalar()
 
        hooks2 = Ui() if hooks2_ is None else hooks2_
 
        hooks2.ui_section = 'hooks'
 
        hooks2.ui_key = hooks2_key
 
        hooks2.ui_value = 'python:kallithea.lib.hooks.repo_size'
 
        self.sa.add(hooks2)
 

	
 
        hooks3 = Ui()
 
        hooks3.ui_section = 'hooks'
 
        hooks3.ui_key = Ui.HOOK_PUSH
 
        hooks3.ui_value = 'python:kallithea.lib.hooks.log_push_action'
 
        self.sa.add(hooks3)
 

	
 
        hooks4 = Ui()
 
        hooks4.ui_section = 'hooks'
 
        hooks4.ui_key = Ui.HOOK_PRE_PUSH
 
        hooks4.ui_value = 'python:kallithea.lib.hooks.pre_push'
 
        self.sa.add(hooks4)
 

	
 
        hooks5 = Ui()
 
        hooks5.ui_section = 'hooks'
 
        hooks5.ui_key = Ui.HOOK_PULL
 
        hooks5.ui_value = 'python:kallithea.lib.hooks.log_pull_action'
 
        self.sa.add(hooks5)
 

	
 
        hooks6 = Ui()
 
        hooks6.ui_section = 'hooks'
 
        hooks6.ui_key = Ui.HOOK_PRE_PULL
 
        hooks6.ui_value = 'python:kallithea.lib.hooks.pre_pull'
 
        self.sa.add(hooks6)
 

	
 
        # enable largefiles
 
        largefiles = Ui()
 
        largefiles.ui_section = 'extensions'
 
        largefiles.ui_key = 'largefiles'
 
        largefiles.ui_value = ''
 
        self.sa.add(largefiles)
 

	
 
        # set default largefiles cache dir, defaults to
 
        # /repo location/.cache/largefiles
 
        largefiles = Ui()
 
        largefiles.ui_section = 'largefiles'
 
        largefiles.ui_key = 'usercache'
 
        largefiles.ui_value = os.path.join(repo_store_path, '.cache',
 
                                           'largefiles')
 
        self.sa.add(largefiles)
 

	
 
        # enable hgsubversion disabled by default
 
        hgsubversion = Ui()
 
        hgsubversion.ui_section = 'extensions'
 
        hgsubversion.ui_key = 'hgsubversion'
 
        hgsubversion.ui_value = ''
 
        hgsubversion.ui_active = False
 
        self.sa.add(hgsubversion)
 

	
 
        # enable hggit disabled by default
 
        hggit = Ui()
 
        hggit.ui_section = 'extensions'
 
        hggit.ui_key = 'hggit'
 
        hggit.ui_value = ''
 
        hggit.ui_active = False
 
        self.sa.add(hggit)
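        # Illustrative note (not part of this changeset): every entry above ends
        # up as one row in the Ui table, following the same shape, roughly:
        #   ui_section='hooks', ui_key=Ui.HOOK_REPO_SIZE,
        #   ui_value='python:kallithea.lib.hooks.repo_size', ui_active=True
        # Extensions (largefiles, hgsubversion, hggit) use ui_section='extensions'
        # with an empty ui_value and are toggled purely via ui_active.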
 

	
 
    def create_auth_plugin_options(self, skip_existing=False):
 
        """
 
        Create default auth plugin settings, and make it active
 

	
 
        :param skip_existing:
 
        """
 

	
 
        for k, v, t in [('auth_plugins', 'kallithea.lib.auth_modules.auth_internal', 'list'),
 
                     ('auth_internal_enabled', 'True', 'bool')]:
 
            if skip_existing and Setting.get_by_name(k) is not None:
 
                log.debug('Skipping option %s' % k)
 
                continue
 
            setting = Setting(k, v, t)
 
            self.sa.add(setting)
 

	
 
    def create_default_options(self, skip_existing=False):
 
        """Creates default settings"""
 

	
 
        for k, v, t in [
 
            ('default_repo_enable_locking',  False, 'bool'),
 
            ('default_repo_enable_downloads', False, 'bool'),
 
            ('default_repo_enable_statistics', False, 'bool'),
 
            ('default_repo_private', False, 'bool'),
 
            ('default_repo_type', 'hg', 'unicode')]:
 

	
 
            if skip_existing and Setting.get_by_name(k) is not None:
 
                log.debug('Skipping option %s' % k)
 
                continue
 
            setting = Setting(k, v, t)
 
            self.sa.add(setting)
 

	
 
    def fixup_groups(self):
 
        def_usr = User.get_default_user()
 
        for g in RepoGroup.query().all():
 
            g.group_name = g.get_new_name(g.name)
 
            self.sa.add(g)
 
            # get default perm
 
            default = UserRepoGroupToPerm.query()\
 
                .filter(UserRepoGroupToPerm.group == g)\
 
                .filter(UserRepoGroupToPerm.user == def_usr)\
 
                .scalar()
 

	
 
            if default is None:
 
                log.debug('missing default permission for group %s, adding it' % g)
 
                perm_obj = RepoGroupModel()._create_default_perms(g)
 
                self.sa.add(perm_obj)
 

	
 
    def reset_permissions(self, username):
 
        """
 
        Resets permissions to the default state, useful when old systems had
        bad permissions that we must clean up
 

	
 
        :param username:
 
        """
 
        default_user = User.get_by_username(username)
 
        if not default_user:
 
            return
 

	
 
        u2p = UserToPerm.query()\
 
            .filter(UserToPerm.user == default_user).all()
 
        fixed = False
 
        if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS):
 
            for p in u2p:
 
                Session().delete(p)
 
            fixed = True
 
            self.populate_default_permissions()
 
        return fixed
 

	
 
    def update_repo_info(self):
 
        RepoModel.update_repoinfo()
 

	
 
    def config_prompt(self, test_repo_path='', retries=3):
 
        defaults = self.cli_args
 
        _path = defaults.get('repos_location')
 
        if retries == 3:
 
            log.info('Setting up repositories config')
 

	
 
        if _path is not None:
 
            path = _path
 
        elif not self.tests and not test_repo_path:
 
            path = raw_input(
 
                 'Enter a valid absolute path to store repositories. '
 
                 'All repositories in that path will be added automatically:'
 
            )
 
        else:
 
            path = test_repo_path
 
        path_ok = True
 

	
 
        # check proper dir
 
        if not os.path.isdir(path):
 
            path_ok = False
 
            log.error('Given path %s is not a valid directory' % (path,))
 

	
 
        elif not os.path.isabs(path):
 
            path_ok = False
 
            log.error('Given path %s is not an absolute path' % (path,))
 

	
 
        # check if path is at least readable.
 
        if not os.access(path, os.R_OK):
 
            path_ok = False
 
            log.error('Given path %s is not readable' % (path,))
 

	
 
        # check write access, warn user about non writeable paths
 
        elif not os.access(path, os.W_OK) and path_ok:
 
            log.warning('No write permission to given path %s' % (path,))
 
            if not ask_ok('Given path %s is not writeable, do you want to '
                          'continue in read-only mode? [y/n]' % (path,)):
 
                log.error('Canceled by user')
 
                sys.exit(-1)
 

	
 
        if retries == 0:
 
            sys.exit('max retries reached')
 
        if not path_ok:
 
            retries -= 1
 
            return self.config_prompt(test_repo_path, retries)
 

	
 
        real_path = os.path.normpath(os.path.realpath(path))
 

	
 
        if real_path != os.path.normpath(path):
 
            log.warning('Using normalized path %s instead of %s' % (real_path, path))
 

	
 
        return real_path
 

	
 
    def create_settings(self, path):
 

	
 
        self.create_ui_settings(path)
 

	
 
        ui_config = [
 
            ('web', 'push_ssl', 'false'),
 
            ('web', 'allow_archive', 'gz zip bz2'),
 
            ('web', 'allow_push', '*'),
 
            ('web', 'baseurl', '/'),
 
            ('paths', '/', path),
 
            #('phases', 'publish', 'false')
 
        ]
 
        for section, key, value in ui_config:
 
            ui_conf = Ui()
 
            setattr(ui_conf, 'ui_section', section)
 
            setattr(ui_conf, 'ui_key', key)
 
            setattr(ui_conf, 'ui_value', value)
 
            self.sa.add(ui_conf)
 

	
 
        settings = [
 
            ('realm', 'Kallithea', 'unicode'),
 
            ('title', '', 'unicode'),
 
            ('ga_code', '', 'unicode'),
 
            ('show_public_icon', True, 'bool'),
 
            ('show_private_icon', True, 'bool'),
 
            ('stylify_metatags', False, 'bool'),
 
            ('dashboard_items', 100, 'int'),
 
            ('admin_grid_items', 25, 'int'),
 
            ('show_version', True, 'bool'),
 
            ('use_gravatar', True, 'bool'),
 
            ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'),
 
            ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'),
 
            ('update_url', Setting.DEFAULT_UPDATE_URL, 'unicode'),
 
        ]
 
        for key, val, type_ in settings:
 
            sett = Setting(key, val, type_)
 
            self.sa.add(sett)
 

	
 
        self.create_auth_plugin_options()
 
        self.create_default_options()
 

	
 
        log.info('created ui config')
 

	
 
    def create_user(self, username, password, email='', admin=False):
 
        log.info('creating user %s' % username)
 
        UserModel().create_or_update(username, password, email,
 
                                     firstname='Kallithea', lastname='Admin',
 
                                     active=True, admin=admin,
 
                                     extern_type=EXTERN_TYPE_INTERNAL)
 

	
 
    def create_default_user(self):
 
        log.info('creating default user')
 
        # create default user for handling default permissions.
 
        user = UserModel().create_or_update(username=User.DEFAULT_USER,
 
                                            password=str(uuid.uuid1())[:20],
 
                                            email='anonymous@kallithea-scm.org',
 
                                            firstname='Anonymous',
 
                                            lastname='User')
 
        # based on configuration options, activate/deactivate this user, which
        # controls anonymous access
 
        if self.cli_args.get('public_access') is False:
 
            log.info('Public access disabled')
 
            user.active = False
 
            Session().add(user)
 
            Session().commit()
 

	
 
    def create_permissions(self):
 
        """
 
        Creates all permissions defined in the system
 
        """
 
        # module.(access|create|change|delete)_[name]
 
        # module.(none|read|write|admin)
 
        log.info('creating permissions')
 
        PermissionModel(self.sa).create_permissions()
 

	
 
    def populate_default_permissions(self):
 
        """
 
        Populate default permissions. It will create only the default
 
        permissions that are missing, and not alter already defined ones
 
        """
 
        log.info('creating default user permissions')
 
        PermissionModel(self.sa).create_default_permissions(user=User.DEFAULT_USER)
 

	
 
    @staticmethod
 
    def check_waitress():
 
        """
 
        Function executed at the end of setup
 
        """
 
        if __py_version__ < (2, 6):
 
            notify('Python2.5 detected, please switch '
 
                   'egg:waitress#main -> egg:Paste#http '
 
                   'in your .ini file')
kallithea/lib/helpers.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
Helper functions
 

	
 
Consists of functions typically used within templates, but also
available to Controllers. This module is available to both as 'h'.
 
"""
 
import random
 
import hashlib
 
import StringIO
 
import math
 
import logging
 
import re
 
import urlparse
 
import textwrap
 

	
 
from pygments.formatters.html import HtmlFormatter
 
from pygments import highlight as code_highlight
 
from pylons import url
 
from pylons.i18n.translation import _, ungettext
 
from hashlib import md5
 

	
 
from webhelpers.html import literal, HTML, escape
 
from webhelpers.html.tools import *
 
from webhelpers.html.builder import make_tag
 
from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
 
    end_form, file, form, hidden, image, javascript_link, link_to, \
 
    link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
 
    submit, text, password, textarea, title, ul, xml_declaration, radio
 
from webhelpers.html.tools import auto_link, button_to, highlight, \
 
    js_obfuscate, mail_to, strip_links, strip_tags, tag_re
 
from webhelpers.number import format_byte_size, format_bit_size
 
from webhelpers.pylonslib import Flash as _Flash
 
from webhelpers.pylonslib.secure_form import secure_form
 
from webhelpers.text import chop_at, collapse, convert_accented_entities, \
 
    convert_misc_entities, lchop, plural, rchop, remove_formatting, \
 
    replace_whitespace, urlify, truncate, wrap_paragraphs
 
from webhelpers.date import time_ago_in_words
 
from webhelpers.paginate import Page as _Page
 
from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
 
    convert_boolean_attrs, NotGiven, _make_safe_id_component
 

	
 
from kallithea.lib.annotate import annotate_highlight
 
from kallithea.lib.utils import repo_name_slug, get_custom_lexer
 
from kallithea.lib.utils2 import str2bool, safe_unicode, safe_str, \
 
    get_changeset_safe, datetime_to_time, time_to_datetime, AttributeDict,\
 
    safe_int
 
from kallithea.lib.markup_renderer import MarkupRenderer, url_re
 
from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError
 
from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset
 
from kallithea.config.conf import DATE_FORMAT, DATETIME_FORMAT
 
from kallithea.model.changeset_status import ChangesetStatusModel
 
from kallithea.model.db import URL_SEP, Permission
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def canonical_url(*args, **kargs):
 
    '''Like url(x, qualified=True), but returns a url that is not only qualified
    but also canonical, as configured in the canonical_url setting'''
 
    from kallithea import CONFIG
 
    try:
 
        parts = CONFIG.get('canonical_url', '').split('://', 1)
 
        kargs['host'] = parts[1].split('/', 1)[0]
 
        kargs['protocol'] = parts[0]
 
    except IndexError:
 
        kargs['qualified'] = True
 
    return url(*args, **kargs)
 

	
 
def canonical_hostname():
 
    '''Return canonical hostname of system'''
 
    from kallithea import CONFIG
 
    try:
 
        parts = CONFIG.get('canonical_url', '').split('://', 1)
 
        return parts[1].split('/', 1)[0]
 
    except IndexError:
 
        parts = url('home', qualified=True).split('://', 1)
 
        return parts[1].split('/', 1)[0]
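# Illustrative sketch (hypothetical setting, not part of this module): with
# CONFIG['canonical_url'] = 'https://kallithea.example.com', the split on '://'
# yields protocol 'https' and host 'kallithea.example.com', so canonical_url()
# and canonical_hostname() build links against that host instead of the request
# host. If the setting is missing or empty, the IndexError branch falls back to
# the ordinary qualified url() / the current request's hostname.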
 

	
 
def html_escape(text, html_escape_table=None):
 
    """Produce entities within text."""
 
    if not html_escape_table:
 
        html_escape_table = {
 
            "&": "&amp;",
 
            '"': "&quot;",
 
            "'": "&apos;",
 
            ">": "&gt;",
 
            "<": "&lt;",
 
        }
 
    return "".join(html_escape_table.get(c, c) for c in text)
 

	
 

	
 
def shorter(text, size=20):
 
    postfix = '...'
 
    if len(text) > size:
 
        return text[:size - len(postfix)] + postfix
 
    return text
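# Illustrative examples (assuming the defaults of size=20 and the '...' postfix):
# >>> shorter('A very long changeset message')
# 'A very long chang...'
# >>> shorter('tip')
# 'tip'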
 

	
 

	
 
def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
 
    """
 
    Reset button
 
    """
 
    _set_input_attrs(attrs, type, name, value)
 
    _set_id_attr(attrs, id, name)
 
    convert_boolean_attrs(attrs, ["disabled"])
 
    return HTML.input(**attrs)
 

	
 
reset = _reset
 
safeid = _make_safe_id_component
 

	
 

	
 
def FID(raw_id, path):
 
    """
 
    Creates a unique ID for a filenode based on a hash of its path and revision;
    it is safe to use in URLs
 

	
 
    :param raw_id:
 
    :param path:
 
    """
 

	
 
    return 'C-%s-%s' % (short_id(raw_id), md5(safe_str(path)).hexdigest()[:12])
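# Illustrative sketch (hypothetical revision hash and path, not from this
# changeset): the generated ID has the shape
#   FID('9d7a5c4e1b2f0a3d...', 'setup.py')
#   -> 'C-9d7a5c4e1b2f-<first 12 hex chars of md5("setup.py")>'
# i.e. 12 chars of the raw id plus 12 chars of the path hash, stable for a given
# (revision, path) pair.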
 

	
 

	
 
def get_token():
 
    """Return the current authentication token, creating one if one doesn't
 
    already exist.
 
    """
 
    token_key = "_authentication_token"
 
    from pylons import session
 
    if token_key not in session:
 
        try:
 
            token = hashlib.sha1(str(random.getrandbits(128))).hexdigest()
 
        except AttributeError:  # Python < 2.4
 
            token = hashlib.sha1(str(random.randrange(2 ** 128))).hexdigest()
 
        session[token_key] = token
 
        if hasattr(session, 'save'):
 
            session.save()
 
    return session[token_key]
 

	
 

	
 
class _GetError(object):
 
    """Get error from form_errors, and represent it as span wrapped error
 
    message
 

	
 
    :param field_name: field to fetch errors for
 
    :param form_errors: form errors dict
 
    """
 

	
 
    def __call__(self, field_name, form_errors):
 
        tmpl = """<span class="error_msg">%s</span>"""
 
        if form_errors and field_name in form_errors:
 
            return literal(tmpl % form_errors.get(field_name))
 

	
 
get_error = _GetError()
 

	
 

	
 
class _ToolTip(object):
 

	
 
    def __call__(self, tooltip_title, trim_at=50):
 
        """
 
        Special function just to wrap our text into nicely formatted,
        auto-wrapped text
 

	
 
        :param tooltip_title:
 
        """
 
        tooltip_title = escape(tooltip_title)
 
        tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
 
        return tooltip_title
 
tooltip = _ToolTip()
 

	
 

	
 
class _FilesBreadCrumbs(object):
 

	
 
    def __call__(self, repo_name, rev, paths):
 
        if isinstance(paths, str):
 
            paths = safe_unicode(paths)
 
        url_l = [link_to(repo_name, url('files_home',
 
                                        repo_name=repo_name,
 
                                        revision=rev, f_path=''),
 
                         class_='ypjax-link')]
 
        paths_l = paths.split('/')
 
        for cnt, p in enumerate(paths_l):
 
            if p != '':
 
                url_l.append(link_to(p,
 
                                     url('files_home',
 
                                         repo_name=repo_name,
 
                                         revision=rev,
 
                                         f_path='/'.join(paths_l[:cnt + 1])
 
                                         ),
 
                                     class_='ypjax-link'
 
                                     )
 
                             )
 

	
 
        return literal('/'.join(url_l))
 

	
 
files_breadcrumbs = _FilesBreadCrumbs()
 

	
 

	
 
class CodeHtmlFormatter(HtmlFormatter):
 
    """
 
    Custom HTML formatter for source code
 
    """
 

	
 
    def wrap(self, source, outfile):
 
        return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
 

	
 
    def _wrap_code(self, source):
 
        for cnt, it in enumerate(source):
 
            i, t = it
 
            t = '<div id="L%s">%s</div>' % (cnt + 1, t)
 
            yield i, t
 

	
 
    def _wrap_tablelinenos(self, inner):
 
        dummyoutfile = StringIO.StringIO()
 
        lncount = 0
 
        for t, line in inner:
 
            if t:
 
                lncount += 1
 
            dummyoutfile.write(line)
 

	
 
        fl = self.linenostart
 
        mw = len(str(lncount + fl - 1))
 
        sp = self.linenospecial
 
        st = self.linenostep
 
        la = self.lineanchors
 
        aln = self.anchorlinenos
 
        nocls = self.noclasses
 
        if sp:
 
            lines = []
 

	
 
            for i in range(fl, fl + lncount):
 
                if i % st == 0:
 
                    if i % sp == 0:
 
                        if aln:
 
                            lines.append('<a href="#%s%d" class="special">%*d</a>' %
 
                                         (la, i, mw, i))
 
                        else:
 
                            lines.append('<span class="special">%*d</span>' % (mw, i))
 
                    else:
 
                        if aln:
 
                            lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
 
                        else:
 
                            lines.append('%*d' % (mw, i))
 
                else:
 
                    lines.append('')
 
            ls = '\n'.join(lines)
 
        else:
 
            lines = []
 
            for i in range(fl, fl + lncount):
 
                if i % st == 0:
 
                    if aln:
 
                        lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
 
                    else:
 
                        lines.append('%*d' % (mw, i))
 
                else:
 
                    lines.append('')
 
            ls = '\n'.join(lines)
 

	
 
        # in case you wonder about the seemingly redundant <div> here: since the
 
        # content in the other cell also is wrapped in a div, some browsers in
 
        # some configurations seem to mess up the formatting...
 
        if nocls:
 
            yield 0, ('<table class="%stable">' % self.cssclass +
 
                      '<tr><td><div class="linenodiv" '
 
                      'style="background-color: #f0f0f0; padding-right: 10px">'
 
                      '<pre style="line-height: 125%">' +
 
                      ls + '</pre></div></td><td id="hlcode" class="code">')
 
        else:
 
            yield 0, ('<table class="%stable">' % self.cssclass +
 
                      '<tr><td class="linenos"><div class="linenodiv"><pre>' +
 
                      ls + '</pre></div></td><td id="hlcode" class="code">')
 
        yield 0, dummyoutfile.getvalue()
 
        yield 0, '</td></tr></table>'
 

	
 

	
 
_whitespace_re = re.compile(r'(\t)|( )(?=\n|</div>)')
 

	
 
def _markup_whitespace(m):
 
    groups = m.groups()
 
    if groups[0]:
 
        return '<u>\t</u>'
 
    if groups[1]:
 
        return ' <i></i>'
 

	
 
def markup_whitespace(s):
 
    return _whitespace_re.sub(_markup_whitespace, s)
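# Illustrative example (not part of this module): tabs are wrapped in <u> and a
# space directly before a newline (or a closing </div>) gets an <i></i> marker,
# so trailing whitespace becomes visible in rendered sources and diffs:
# >>> markup_whitespace('\tif x: \n')
# '<u>\t</u>if x: <i></i>\n'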
 

	
 
def pygmentize(filenode, **kwargs):
 
    """
 
    pygmentize function using pygments
 

	
 
    :param filenode:
 
    """
 
    lexer = get_custom_lexer(filenode.extension) or filenode.lexer
 
    return literal(markup_whitespace(
 
        code_highlight(filenode.content, lexer, CodeHtmlFormatter(**kwargs))))
 

	
 

	
 
def pygmentize_annotation(repo_name, filenode, **kwargs):
 
    """
 
    pygmentize function for annotation
 

	
 
    :param filenode:
 
    """
 

	
 
    color_dict = {}
 

	
 
    def gen_color(n=10000):
        """generator for getting n evenly distributed colors using
        HSV color space and the golden ratio. It always returns the same order of colors
 

	
 
        :returns: RGB tuple
 
        """
 

	
 
        def hsv_to_rgb(h, s, v):
 
            if s == 0.0:
 
                return v, v, v
 
            i = int(h * 6.0)  # XXX assume int() truncates!
 
            f = (h * 6.0) - i
 
            p = v * (1.0 - s)
 
            q = v * (1.0 - s * f)
 
            t = v * (1.0 - s * (1.0 - f))
 
            i = i % 6
 
            if i == 0:
 
                return v, t, p
 
            if i == 1:
 
                return q, v, p
 
            if i == 2:
 
                return p, v, t
 
            if i == 3:
 
                return p, q, v
 
            if i == 4:
 
                return t, p, v
 
            if i == 5:
 
                return v, p, q
 

	
 
        golden_ratio = 0.618033988749895
 
        h = 0.22717784590367374
 

	
 
        for _unused in xrange(n):
 
            h += golden_ratio
 
            h %= 1
 
            HSV_tuple = [h, 0.95, 0.95]
 
            RGB_tuple = hsv_to_rgb(*HSV_tuple)
 
            yield map(lambda x: str(int(x * 256)), RGB_tuple)
 

	
 
    cgenerator = gen_color()
 

	
 
    def get_color_string(cs):
 
        if cs in color_dict:
 
            col = color_dict[cs]
 
        else:
 
            col = color_dict[cs] = cgenerator.next()
 
        return "color: rgb(%s)! important;" % (', '.join(col))
 

	
 
    def url_func(repo_name):
 

	
 
        def _url_func(changeset):
 
            author = changeset.author
 
            date = changeset.date
 
            message = tooltip(changeset.message)
 

	
 
            tooltip_html = ("<div style='font-size:0.8em'><b>Author:</b>"
 
                            " %s<br/><b>Date:</b> %s</b><br/><b>Message:"
 
                            "</b> %s<br/></div>")
 

	
 
            tooltip_html = tooltip_html % (author, date, message)
 
            lnk_format = show_id(changeset)
 
            uri = link_to(
 
                    lnk_format,
 
                    url('changeset_home', repo_name=repo_name,
 
                        revision=changeset.raw_id),
 
                    style=get_color_string(changeset.raw_id),
 
                    class_='tooltip',
 
                    title=tooltip_html
 
                  )
 

	
 
            uri += '\n'
 
            return uri
 
        return _url_func
 

	
 
    return literal(markup_whitespace(annotate_highlight(filenode, url_func(repo_name), **kwargs)))
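# Descriptive note (reading of the code above, not stated in this module):
# gen_color() steps the hue by the golden ratio so consecutive changesets get
# visually distinct colors, while get_color_string() memoizes per raw_id, so
# every line annotated with the same changeset is rendered in the same color.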
 

	
 

	
 
def is_following_repo(repo_name, user_id):
 
    from kallithea.model.scm import ScmModel
 
    return ScmModel().is_following_repo(repo_name, user_id)
 

	
 
class _Message(object):
 
    """A message returned by ``Flash.pop_messages()``.
 

	
 
    Converting the message to a string returns the message text. Instances
 
    also have the following attributes:
 

	
 
    * ``message``: the message text.
 
    * ``category``: the category specified when the message was created.
 
    """
 

	
 
    def __init__(self, category, message):
 
        self.category = category
 
        self.message = message
 

	
 
    def __str__(self):
 
        return self.message
 

	
 
    __unicode__ = __str__
 

	
 
    def __html__(self):
 
        return escape(safe_unicode(self.message))
 

	
 
class Flash(_Flash):
 

	
 
    def pop_messages(self):
 
        """Return all accumulated messages and delete them from the session.
 

	
 
        The return value is a list of ``Message`` objects.
 
        """
 
        from pylons import session
 
        messages = session.pop(self.session_key, [])
 
        session.save()
 
        return [_Message(*m) for m in messages]
 

	
 
flash = Flash()
 

	
 
#==============================================================================
 
# SCM FILTERS available via h.
 
#==============================================================================
 
from kallithea.lib.vcs.utils import author_name, author_email
 
from kallithea.lib.utils2 import credentials_filter, age as _age
 
from kallithea.model.db import User, ChangesetStatus
 

	
 
age = lambda  x, y=False: _age(x, y)
 
capitalize = lambda x: x.capitalize()
 
email = author_email
 
short_id = lambda x: x[:12]
 
hide_credentials = lambda x: ''.join(credentials_filter(x))
 

	
 

	
 
def show_id(cs):
 
    """
 
    Configurable function that shows ID
 
    by default it's r123:fffeeefffeee
 

	
 
    :param cs: changeset instance
 
    """
 
    from kallithea import CONFIG
 
    def_len = safe_int(CONFIG.get('show_sha_length', 12))
 
    show_rev = str2bool(CONFIG.get('show_revision_number', False))
 

	
 
    raw_id = cs.raw_id[:def_len]
 
    if show_rev:
 
        return 'r%s:%s' % (cs.revision, raw_id)
 
    else:
 
        return '%s' % (raw_id)
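# Illustrative examples (hypothetical changeset with revision 123 and a raw_id
# starting with 'fffeeefffeee...'):
#   defaults (show_sha_length=12, show_revision_number off) -> 'fffeeefffeee'
#   with show_revision_number enabled                       -> 'r123:fffeeefffeee'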
 

	
 

	
 
def fmt_date(date):
 
    if date:
 
        _fmt = u"%a, %d %b %Y %H:%M:%S".encode('utf8')
 
        return date.strftime(_fmt).decode('utf8')
 

	
 
    return ""
 

	
 

	
 
def is_git(repository):
 
    if hasattr(repository, 'alias'):
 
        _type = repository.alias
 
    elif hasattr(repository, 'repo_type'):
 
        _type = repository.repo_type
 
    else:
 
        _type = repository
 
    return _type == 'git'
 

	
 

	
 
def is_hg(repository):
 
    if hasattr(repository, 'alias'):
 
        _type = repository.alias
 
    elif hasattr(repository, 'repo_type'):
 
        _type = repository.repo_type
 
    else:
 
        _type = repository
 
    return _type == 'hg'
 

	
 

	
 
def email_or_none(author):
 
    if not author:
 
        return None
 
    # extract email from the commit string
 
    _email = email(author)
 
    if _email:
 
        # check it against Kallithea database, and use the MAIN email for this
 
        # user
 
        user = User.get_by_email(_email, case_insensitive=True, cache=True)
 
        if user is not None:
 
            return user.email
 
        return _email
 

	
 
    # See if it contains a username we can get an email from
 
    user = User.get_by_username(author_name(author), case_insensitive=True,
 
                                cache=True)
 
    if user is not None:
 
        return user.email
 

	
 
    # No valid email, not a valid user in the system, none!
 
    return None
 

	
 

	
 
def person(author, show_attr="username"):
 
    # attr to return from fetched user
 
    person_getter = lambda usr: getattr(usr, show_attr)
 

	
 
    # if author is already an instance use it for extraction
 
    if isinstance(author, User):
 
        return person_getter(author)
 

	
 
    # Valid email in the attribute passed, see if they're in the system
 
    _email = email(author)
 
    if _email:
 
        user = User.get_by_email(_email, case_insensitive=True, cache=True)
 
        if user is not None:
 
            return person_getter(user)
 

	
 
    # Maybe it's a username?
 
    _author = author_name(author)
 
    user = User.get_by_username(_author, case_insensitive=True,
 
                                cache=True)
 
    if user is not None:
 
        return person_getter(user)
 

	
 
    # Still nothing?  Just pass back the author name if any, else the email
 
    return _author or _email
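# Descriptive note (not part of this module): resolution order is 1) a User
# instance is used directly, 2) an email match in the database, 3) a username
# match, and only then the raw author name or email from the commit string,
# e.g. person('Joe Doe <joe@example.com>') falls back to 'Joe Doe' when no
# matching account exists.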
 

	
 

	
 
def person_by_id(id_, show_attr="username"):
 
    # attr to return from fetched user
 
    person_getter = lambda usr: getattr(usr, show_attr)
 

	
 
    #maybe it's an ID ?
 
    if str(id_).isdigit() or isinstance(id_, int):
 
        id_ = int(id_)
 
        user = User.get(id_)
 
        if user is not None:
 
            return person_getter(user)
 
    return id_
 

	
 

	
 
def desc_stylize(value):
 
    """
 
    converts tags from value into html equivalent
 

	
 
    :param value:
 
    """
 
    if not value:
 
        return ''
 

	
 
    value = re.sub(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
 
                   '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
 
    value = re.sub(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
 
                   '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value)
 
    value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]',
 
                   '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
 
    value = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]',
 
                   '<div class="metatag" tag="lang">\\2</div>', value)
 
    value = re.sub(r'\[([a-z]+)\]',
 
                  '<div class="metatag" tag="\\1">\\1</div>', value)
 

	
 
    return value
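# Illustrative examples (based on the regexes above, not an exhaustive list):
# >>> desc_stylize('[requires => kallithea]')
# '<div class="metatag" tag="requires">requires =&gt; <a href="/kallithea">kallithea</a></div>'
# >>> desc_stylize('[lang => python]')
# '<div class="metatag" tag="lang">python</div>'
# >>> desc_stylize('[stable]')
# '<div class="metatag" tag="stable">stable</div>'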
 

	
 

	
 
def boolicon(value):
    """Returns the boolean value of a value, represented as a small HTML
    true/false icon
 

	
 
    :param value: value
 
    """
 

	
 
    if value:
 
        return HTML.tag('i', class_="icon-ok")
 
    else:
 
        return HTML.tag('i', class_="icon-minus-circled")
 

	
 

	
 
def action_parser(user_log, feed=False, parse_cs=False):
 
    """
 
    This helper maps the specified action string into translated
    fancy names with icons and links
 

	
 
    :param user_log: user log instance
 
    :param feed: use output for feeds (no html and fancy icons)
 
    :param parse_cs: parse Changesets into VCS instances
 
    """
 

	
 
    action = user_log.action
 
    action_params = ' '
 

	
 
    x = action.split(':')
 

	
 
    if len(x) > 1:
 
        action, action_params = x
 

	
 
    def get_cs_links():
 
        revs_limit = 3  # display this amount always
 
        revs_top_limit = 50  # show up to this amount of hidden changesets
 
        revs_ids = action_params.split(',')
 
        deleted = user_log.repository is None
 
        if deleted:
 
            return ','.join(revs_ids)
 

	
 
        repo_name = user_log.repository.repo_name
 

	
 
        def lnk(rev, repo_name):
 
            if isinstance(rev, BaseChangeset) or isinstance(rev, AttributeDict):
 
                lazy_cs = True
 
                if getattr(rev, 'op', None) and getattr(rev, 'ref_name', None):
 
                    lazy_cs = False
 
                    lbl = '?'
 
                    if rev.op == 'delete_branch':
 
                        lbl = '%s' % _('Deleted branch: %s') % rev.ref_name
 
                        title = ''
 
                    elif rev.op == 'tag':
 
                        lbl = '%s' % _('Created tag: %s') % rev.ref_name
 
                        title = ''
 
                    _url = '#'
 

	
 
                else:
 
                    lbl = '%s' % (rev.short_id[:8])
 
                    _url = url('changeset_home', repo_name=repo_name,
 
                               revision=rev.raw_id)
 
                    title = tooltip(rev.message)
 
            else:
 
                ## changeset cannot be found/stripped/removed etc.
 
                lbl = ('%s' % rev)[:12]
 
                _url = '#'
 
                title = _('Changeset not found')
 
            if parse_cs:
 
                return link_to(lbl, _url, title=title, class_='tooltip')
 
            return link_to(lbl, _url, raw_id=rev.raw_id, repo_name=repo_name,
 
                           class_='lazy-cs' if lazy_cs else '')
 

	
 
        def _get_op(rev_txt):
 
            _op = None
 
            _name = rev_txt
 
            if len(rev_txt.split('=>')) == 2:
 
                _op, _name = rev_txt.split('=>')
 
            return _op, _name
 

	
 
        revs = []
 
        if len(filter(lambda v: v != '', revs_ids)) > 0:
 
            repo = None
 
            for rev in revs_ids[:revs_top_limit]:
 
                _op, _name = _get_op(rev)
 

	
 
                # we want parsed changesets, or new log store format is bad
 
                if parse_cs:
 
                    try:
 
                        if repo is None:
 
                            repo = user_log.repository.scm_instance
 
                        _rev = repo.get_changeset(rev)
 
                        revs.append(_rev)
 
                    except ChangesetDoesNotExistError:
 
                        log.error('cannot find revision %s in this repo' % rev)
 
                        revs.append(rev)
 
                        continue
 
                else:
 
                    _rev = AttributeDict({
 
                        'short_id': rev[:12],
 
                        'raw_id': rev,
 
                        'message': '',
 
                        'op': _op,
 
                        'ref_name': _name
 
                    })
 
                    revs.append(_rev)
 
        cs_links = [" " + ', '.join(
 
            [lnk(rev, repo_name) for rev in revs[:revs_limit]]
 
        )]
 
        _op1, _name1 = _get_op(revs_ids[0])
 
        _op2, _name2 = _get_op(revs_ids[-1])
 

	
 
        _rev = '%s...%s' % (_name1, _name2)
 

	
 
        compare_view = (
 
            ' <div class="compare_view tooltip" title="%s">'
 
            '<a href="%s">%s</a> </div>' % (
 
                _('Show all combined changesets %s->%s') % (
 
                    revs_ids[0][:12], revs_ids[-1][:12]
 
                ),
 
                url('changeset_home', repo_name=repo_name,
 
                    revision=_rev
 
                ),
 
                _('compare view')
 
            )
 
        )
 

	
 
        # if we have exactly one more than normally displayed
 
        # just display it, takes less space than displaying
 
        # "and 1 more revisions"
 
        if len(revs_ids) == revs_limit + 1:
 
            cs_links.append(", " + lnk(revs[revs_limit], repo_name))
 

	
 
        # hidden-by-default ones
 
        if len(revs_ids) > revs_limit + 1:
 
            uniq_id = revs_ids[0]
 
            html_tmpl = (
 
                '<span> %s <a class="show_more" id="_%s" '
 
                'href="#more">%s</a> %s</span>'
 
            )
 
            if not feed:
 
                cs_links.append(html_tmpl % (
 
                      _('and'),
 
                      uniq_id, _('%s more') % (len(revs_ids) - revs_limit),
 
                      _('revisions')
 
                    )
 
                )
 

	
 
            if not feed:
 
                html_tmpl = '<span id="%s" style="display:none">, %s </span>'
 
            else:
 
                html_tmpl = '<span id="%s"> %s </span>'
 

	
 
            morelinks = ', '.join(
 
              [lnk(rev, repo_name) for rev in revs[revs_limit:]]
 
            )
 

	
 
            if len(revs_ids) > revs_top_limit:
 
                morelinks += ', ...'
 

	
 
            cs_links.append(html_tmpl % (uniq_id, morelinks))
 
        if len(revs) > 1:
 
            cs_links.append(compare_view)
 
        return ''.join(cs_links)
 

	
 
    def get_fork_name():
 
        repo_name = action_params
 
        _url = url('summary_home', repo_name=repo_name)
 
        return _('fork name %s') % link_to(action_params, _url)
 

	
 
    def get_user_name():
 
        user_name = action_params
 
        return user_name
 

	
 
    def get_users_group():
 
        group_name = action_params
 
        return group_name
 

	
 
    def get_pull_request():
 
        pull_request_id = action_params
 
        deleted = user_log.repository is None
 
        if deleted:
 
            repo_name = user_log.repository_name
 
        else:
 
            repo_name = user_log.repository.repo_name
 
        return link_to(_('Pull request #%s') % pull_request_id,
 
                    url('pullrequest_show', repo_name=repo_name,
 
                    pull_request_id=pull_request_id))
 

	
 
    def get_archive_name():
 
        archive_name = action_params
 
        return archive_name
 

	
 
    # action : translated str, callback(extractor), icon
 
    action_map = {
 
    'user_deleted_repo':           (_('[deleted] repository'),
 
                                    None, 'icon-trashcan'),
 
    'user_created_repo':           (_('[created] repository'),
 
                                    None, 'icon-plus icon-plus-colored'),
 
    'user_created_fork':           (_('[created] repository as fork'),
 
                                    None, 'icon-fork'),
 
    'user_forked_repo':            (_('[forked] repository'),
 
                                    get_fork_name, 'icon-fork'),
 
    'user_updated_repo':           (_('[updated] repository'),
 
                                    None, 'icon-pencil icon-pencil-colored'),
 
    'user_downloaded_archive':      (_('[downloaded] archive from repository'),
 
                                    get_archive_name, 'icon-download-cloud'),
 
    'admin_deleted_repo':          (_('[delete] repository'),
 
                                    None, 'icon-trashcan'),
 
    'admin_created_repo':          (_('[created] repository'),
 
                                    None, 'icon-plus icon-plus-colored'),
 
    'admin_forked_repo':           (_('[forked] repository'),
 
                                    None, 'icon-fork icon-fork-colored'),
 
    'admin_updated_repo':          (_('[updated] repository'),
 
                                    None, 'icon-pencil icon-pencil-colored'),
 
    'admin_created_user':          (_('[created] user'),
 
                                    get_user_name, 'icon-user icon-user-colored'),
 
    'admin_updated_user':          (_('[updated] user'),
 
                                    get_user_name, 'icon-user icon-user-colored'),
 
    'admin_created_users_group':   (_('[created] user group'),
 
                                    get_users_group, 'icon-pencil icon-pencil-colored'),
 
    'admin_updated_users_group':   (_('[updated] user group'),
 
                                    get_users_group, 'icon-pencil icon-pencil-colored'),
 
    'user_commented_revision':     (_('[commented] on revision in repository'),
 
                                    get_cs_links, 'icon-comment icon-comment-colored'),
 
    'user_commented_pull_request': (_('[commented] on pull request for'),
 
                                    get_pull_request, 'icon-comment icon-comment-colored'),
 
    'user_closed_pull_request':    (_('[closed] pull request for'),
 
                                    get_pull_request, 'icon-ok'),
 
    'push':                        (_('[pushed] into'),
 
                                    get_cs_links, 'icon-move-up'),
 
    'push_local':                  (_('[committed via Kallithea] into repository'),
 
                                    get_cs_links, 'icon-pencil icon-pencil-colored'),
 
    'push_remote':                 (_('[pulled from remote] into repository'),
 
                                    get_cs_links, 'icon-move-up'),
 
    'pull':                        (_('[pulled] from'),
 
                                    None, 'icon-move-down'),
 
    'started_following_repo':      (_('[started following] repository'),
 
                                    None, 'icon-heart icon-heart-colored'),
 
    'stopped_following_repo':      (_('[stopped following] repository'),
 
                                    None, 'icon-heart-empty icon-heart-colored'),
 
    }
 

	
 
    action_str = action_map.get(action, action)
 
    if feed:
 
        action = action_str[0].replace('[', '').replace(']', '')
 
    else:
 
        action = action_str[0]\
 
            .replace('[', '<span class="journal_highlight">')\
 
            .replace(']', '</span>')
 

	
 
    action_params_func = lambda: ""
 

	
 
    if callable(action_str[1]):
 
        action_params_func = action_str[1]
 

	
 
    def action_parser_icon():
 
        action = user_log.action
 
        action_params = None
 
        x = action.split(':')
 

	
 
        if len(x) > 1:
 
            action, action_params = x
 

	
 
        tmpl = """<i class="%s" alt="%s"></i>"""
 
        ico = action_map.get(action, ['', '', ''])[2]
 
        return literal(tmpl % (ico, action))
 

	
 
    # returned callbacks that the caller needs to invoke to render the action
 
    return [lambda: literal(action), action_params_func, action_parser_icon]
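# Illustrative usage sketch (assumption about the calling templates, not part of
# this module): the three returned callbacks are meant to be invoked lazily at
# render time, roughly
#   action_txt, params_func, icon_func = action_parser(user_log)
#   html = icon_func() + action_txt() + ' ' + params_func()
# so the changeset parsing in get_cs_links() only runs for entries actually shown.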
 

	
 

	
 

	
 
#==============================================================================
 
# PERMS
 
#==============================================================================
 
from kallithea.lib.auth import HasPermissionAny, HasPermissionAll, \
 
HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
 
HasRepoGroupPermissionAny
 

	
 

	
 
#==============================================================================
 
# GRAVATAR URL
 
#==============================================================================
 

	
 
def gravatar_url(email_address, size=30, ssl_enabled=True):
 
    # doh, we need to re-import those to mock it later
 
    from pylons import url
 
    from pylons import tmpl_context as c
 

	
 
    _def = 'anonymous@kallithea-scm.org'  # default gravatar
 
    _use_gravatar = c.visual.use_gravatar
 
    _gravatar_url = c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL
 

	
 
    email_address = email_address or _def
 
    if isinstance(email_address, unicode):
 
        #hashlib crashes on unicode items
 
        email_address = safe_str(email_address)
 

	
 
    if not _use_gravatar or not email_address or email_address == _def:
 
        # pick best matching size to one given in size param
 
        f = lambda a, l: min(l, key=lambda x: abs(x - a))
 
        return url("/images/user%s.png" % f(size, [14, 16, 20, 24, 30]))
 

	
 
    if _use_gravatar:
 
        _md5 = lambda s: hashlib.md5(s).hexdigest()
 

	
 
        tmpl = _gravatar_url
 
        parsed_url = urlparse.urlparse(url.current(qualified=True))
 
        tmpl = tmpl.replace('{email}', email_address)\
 
                   .replace('{md5email}', _md5(email_address.lower())) \
 
                   .replace('{netloc}', parsed_url.netloc)\
 
                   .replace('{scheme}', parsed_url.scheme)\
 
                   .replace('{size}', safe_str(size))
 
        return tmpl
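# Illustrative sketch (hypothetical template, not the shipped default): with
# gravatar_url set to '{scheme}://{netloc}/avatar/{md5email}?size={size}' and a
# request on https://kallithea.example.com, an address like jane@example.com
# renders roughly as
#   'https://kallithea.example.com/avatar/<md5 of "jane@example.com">?size=30'
# i.e. {scheme}/{netloc} come from the current request URL, {md5email} from the
# lowercased address, and {size} from the size parameter.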
 

	
 
class Page(_Page):
 
    """
 
    Custom pager to match rendering style with YUI paginator
 
    """
 

	
 
    def _get_pos(self, cur_page, max_page, items):
 
        edge = (items / 2) + 1
 
        if (cur_page <= edge):
 
            radius = max(items / 2, items - cur_page)
 
        elif (max_page - cur_page) < edge:
 
            radius = (items - 1) - (max_page - cur_page)
 
        else:
 
            radius = items / 2
 

	
 
        left = max(1, (cur_page - (radius)))
 
        right = min(max_page, cur_page + (radius))
 
        return left, cur_page, right
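    # Illustrative examples (assuming Python 2 integer division, as used above),
    # for max_page=20 and items=5 (i.e. a radius-2 window):
    #   _get_pos(7, 20, 5)  -> (5, 7, 9)    centered window
    #   _get_pos(1, 20, 5)  -> (1, 1, 5)    clamped at the start
    #   _get_pos(19, 20, 5) -> (16, 19, 20) widened near the end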
 

	
 
    def _range(self, regexp_match):
 
        """
 
        Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
 

	
 
        Arguments:
 

	
 
        regexp_match
 
            A "re" (regular expressions) match object containing the
 
            radius of linked pages around the current page in
 
            regexp_match.group(1) as a string
 

	
 
        This function is supposed to be called as a callable in
 
        re.sub.
 

	
 
        """
 
        radius = int(regexp_match.group(1))
 

	
 
        # Compute the first and last page number within the radius
 
        # e.g. '1 .. 5 6 [7] 8 9 .. 12'
 
        # -> leftmost_page  = 5
 
        # -> rightmost_page = 9
 
        leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
 
                                                            self.last_page,
 
                                                            (radius * 2) + 1)
 
        nav_items = []
 

	
 
        # Create a link to the first page (unless we are on the first page
 
        # or there would be no need to insert '..' spacers)
 
        if self.page != self.first_page and self.first_page < leftmost_page:
 
            nav_items.append(self._pagerlink(self.first_page, self.first_page))
 

	
 
        # Insert dots if there are pages between the first page
 
        # and the currently displayed page range
 
        if leftmost_page - self.first_page > 1:
 
            # Wrap in a SPAN tag if nolink_attr is set
 
            text = '..'
 
            if self.dotdot_attr:
 
                text = HTML.span(c=text, **self.dotdot_attr)
 
            nav_items.append(text)
 

	
 
        for thispage in xrange(leftmost_page, rightmost_page + 1):
 
            # Highlight the current page number and do not use a link
 
            if thispage == self.page:
 
                text = '%s' % (thispage,)
 
                # Wrap in a SPAN tag if nolink_attr is set
 
                if self.curpage_attr:
 
                    text = HTML.span(c=text, **self.curpage_attr)
 
                nav_items.append(text)
 
            # Otherwise create just a link to that page
 
            else:
 
                text = '%s' % (thispage,)
 
                nav_items.append(self._pagerlink(thispage, text))
 

	
 
        # Insert dots if there are pages between the displayed
 
        # page numbers and the end of the page range
 
        if self.last_page - rightmost_page > 1:
 
            text = '..'
 
            # Wrap in a SPAN tag if nolink_attr is set
 
            if self.dotdot_attr:
 
                text = HTML.span(c=text, **self.dotdot_attr)
 
            nav_items.append(text)
 

	
 
        # Create a link to the very last page (unless we are on the last
 
        # page or there would be no need to insert '..' spacers)
 
        if self.page != self.last_page and rightmost_page < self.last_page:
 
            nav_items.append(self._pagerlink(self.last_page, self.last_page))
 

	
 
        #_page_link = url.current()
 
        #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
 
        #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
 
        return self.separator.join(nav_items)
 

	
 
    def pager(self, format='~2~', page_param='page', partial_param='partial',
 
        show_if_single_page=False, separator=' ', onclick=None,
 
        symbol_first='<<', symbol_last='>>',
 
        symbol_previous='<', symbol_next='>',
 
        link_attr={'class': 'pager_link', 'rel': 'prerender'},
 
        curpage_attr={'class': 'pager_curpage'},
 
        dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
 

	
 
        self.curpage_attr = curpage_attr
 
        self.separator = separator
 
        self.pager_kwargs = kwargs
 
        self.page_param = page_param
 
        self.partial_param = partial_param
 
        self.onclick = onclick
 
        self.link_attr = link_attr
 
        self.dotdot_attr = dotdot_attr
 

	
 
        # Don't show navigator if there is no more than one page
 
        if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
 
            return ''
 

	
 
        from string import Template
 
        # Replace ~...~ in token format by range of pages
 
        result = re.sub(r'~(\d+)~', self._range, format)
 

	
 
        # Interpolate '%' variables
 
        result = Template(result).safe_substitute({
 
            'first_page': self.first_page,
 
            'last_page': self.last_page,
 
            'page': self.page,
 
            'page_count': self.page_count,
 
            'items_per_page': self.items_per_page,
 
            'first_item': self.first_item,
 
            'last_item': self.last_item,
 
            'item_count': self.item_count,
 
            'link_first': self.page > self.first_page and \
 
                    self._pagerlink(self.first_page, symbol_first) or '',
 
            'link_last': self.page < self.last_page and \
 
                    self._pagerlink(self.last_page, symbol_last) or '',
 
            'link_previous': self.previous_page and \
 
                    self._pagerlink(self.previous_page, symbol_previous) \
 
                    or HTML.span(symbol_previous, class_="yui-pg-previous"),
 
            'link_next': self.next_page and \
 
                    self._pagerlink(self.next_page, symbol_next) \
 
                    or HTML.span(symbol_next, class_="yui-pg-next")
 
        })
 

	
 
        return literal(result)
 

	
 

	
 
#==============================================================================
 
# REPO PAGER, PAGER FOR REPOSITORY
 
#==============================================================================
 
class RepoPage(Page):
 

	
 
    def __init__(self, collection, page=1, items_per_page=20,
 
                 item_count=None, url=None, **kwargs):

        """Create a "RepoPage" instance, a special pager for paging a
        repository
 
        """
 
        self._url_generator = url
 

	
 
        # Save the kwargs class-wide so they can be used in the pager() method
 
        self.kwargs = kwargs
 

	
 
        # Save a reference to the collection
 
        self.original_collection = collection
 

	
 
        self.collection = collection
 

	
 
        # The self.page is the number of the current page.
 
        # The first page has the number 1!
 
        try:
 
            self.page = int(page)  # make it int() if we get it as a string
 
        except (ValueError, TypeError):
 
            self.page = 1
 

	
 
        self.items_per_page = items_per_page
 

	
 
        # Unless the user tells us how many items the collections has
 
        # we calculate that ourselves.
 
        if item_count is not None:
 
            self.item_count = item_count
 
        else:
 
            self.item_count = len(self.collection)
 

	
 
        # Compute the number of the first and last available page
 
        if self.item_count > 0:
 
            self.first_page = 1
 
            self.page_count = int(math.ceil(float(self.item_count) /
 
                                            self.items_per_page))
 
            self.last_page = self.first_page + self.page_count - 1
 

	
 
            # Make sure that the requested page number is in the range of
            # valid pages
 
            if self.page > self.last_page:
 
                self.page = self.last_page
 
            elif self.page < self.first_page:
 
                self.page = self.first_page
 

	
 
            # Note: the number of items on this page can be less than
 
            #       items_per_page if the last page is not full
 
            self.first_item = max(0, (self.item_count) - (self.page *
 
                                                          items_per_page))
 
            self.last_item = ((self.item_count - 1) - items_per_page *
 
                              (self.page - 1))
 

	
 
            self.items = list(self.collection[self.first_item:self.last_item + 1])
 

	
 
            # Links to previous and next page
 
            if self.page > self.first_page:
 
                self.previous_page = self.page - 1
 
            else:
 
                self.previous_page = None
 

	
 
            if self.page < self.last_page:
 
                self.next_page = self.page + 1
 
            else:
 
                self.next_page = None
 

	
 
        # No items available
 
        else:
 
            self.first_page = None
 
            self.page_count = 0
 
            self.last_page = None
 
            self.first_item = None
 
            self.last_item = None
 
            self.previous_page = None
 
            self.next_page = None
 
            self.items = []
 

	
 
        # This is a subclass of the 'list' type. Initialise the list now.
 
        list.__init__(self, reversed(self.items))
 

	
 

	
 
def changed_tooltip(nodes):
 
    """
 
    Generates an HTML string for changed nodes on the changeset page.
 
    It limits the output to 30 entries
 

	
 
    :param nodes: LazyNodesGenerator
 
    """
 
    if nodes:
 
        pref = ': <br/> '
 
        suf = ''
 
        if len(nodes) > 30:
 
            suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
 
        return literal(pref + '<br/> '.join([safe_unicode(x.path)
 
                                             for x in nodes[:30]]) + suf)
 
    else:
 
        return ': ' + _('No Files')
 

	
 

	
 
def repo_link(groups_and_repos):
 
    """
 
    Makes a breadcrumb link to a repo within a group,
    joining &raquo; on each group to create a fancy link
 

	
 
    ex::
 
        group >> subgroup >> repo
 

	
 
    :param groups_and_repos:
 
    :param last_url:
 
    """
 
    groups, just_name, repo_name = groups_and_repos
 
    last_url = url('summary_home', repo_name=repo_name)
 
    last_link = link_to(just_name, last_url)
 

	
 
    def make_link(group):
 
        return link_to(group.name,
 
                       url('repos_group_home', group_name=group.group_name))
 
    return literal(' &raquo; '.join(map(make_link, groups) + ['<span>%s</span>' % last_link]))
 

	
 

	
 
def fancy_file_stats(stats):
 
    """
 
    Displays a fancy two-colored bar for the number of added/deleted
    lines of code in a file
 

	
 
    :param stats: two element list of added/deleted lines of code
 
    """
 
    from kallithea.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
 
        MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE
 

	
 
    def cgen(l_type, a_v, d_v):
 
        mapping = {'tr': 'top-right-rounded-corner-mid',
 
                   'tl': 'top-left-rounded-corner-mid',
 
                   'br': 'bottom-right-rounded-corner-mid',
 
                   'bl': 'bottom-left-rounded-corner-mid'}
 
        map_getter = lambda x: mapping[x]
 

	
 
        if l_type == 'a' and d_v:
 
            #case when added and deleted are present
 
            return ' '.join(map(map_getter, ['tl', 'bl']))
 

	
 
        if l_type == 'a' and not d_v:
 
            return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl']))
 

	
 
        if l_type == 'd' and a_v:
 
            return ' '.join(map(map_getter, ['tr', 'br']))
 

	
 
        if l_type == 'd' and not a_v:
 
            return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl']))
 

	
 
    a, d = stats['added'], stats['deleted']
 
    width = 100
 

	
 
    if stats['binary']:
 
        #binary mode
 
        lbl = ''
 
        bin_op = 1
 

	
 
        if BIN_FILENODE in stats['ops']:
 
            lbl = 'bin+'
 

	
 
        if NEW_FILENODE in stats['ops']:
 
            lbl += _('new file')
 
            bin_op = NEW_FILENODE
 
        elif MOD_FILENODE in stats['ops']:
 
            lbl += _('mod')
 
            bin_op = MOD_FILENODE
 
        elif DEL_FILENODE in stats['ops']:
 
            lbl += _('del')
 
            bin_op = DEL_FILENODE
 
        elif RENAMED_FILENODE in stats['ops']:
 
            lbl += _('rename')
 
            bin_op = RENAMED_FILENODE
 

	
 
        #chmod can go with other operations
 
        if CHMOD_FILENODE in stats['ops']:
 
            _org_lbl = _('chmod')
 
            lbl += _org_lbl if lbl.endswith('+') else '+%s' % _org_lbl
 

	
 
        #import ipdb;ipdb.set_trace()
 
        b_d = '<div class="bin bin%s %s" style="width:100%%">%s</div>' % (bin_op, cgen('a', a_v='', d_v=0), lbl)
 
        b_a = '<div class="bin bin1" style="width:0%%"></div>'
 
        return literal('<div style="width:%spx">%s%s</div>' % (width, b_a, b_d))
 

	
 
    t = stats['added'] + stats['deleted']
 
    unit = float(width) / (t or 1)
 

	
 
    # needs > 9% of width to be visible or 0 to be hidden
 
    a_p = max(9, unit * a) if a > 0 else 0
 
    d_p = max(9, unit * d) if d > 0 else 0
 
    p_sum = a_p + d_p
 

	
 
    if p_sum > width:
 
        #adjust the percentage to be == 100% since we adjusted to 9
 
        if a_p > d_p:
 
            a_p = a_p - (p_sum - width)
 
        else:
 
            d_p = d_p - (p_sum - width)
 

	
 
    a_v = a if a > 0 else ''
 
    d_v = d if d > 0 else ''
 

	
 
    d_a = '<div class="added %s" style="width:%s%%">%s</div>' % (
 
        cgen('a', a_v, d_v), a_p, a_v
 
    )
 
    d_d = '<div class="deleted %s" style="width:%s%%">%s</div>' % (
 
        cgen('d', a_v, d_v), d_p, d_v
 
    )
 
    return literal('<div style="width:%spx">%s%s</div>' % (width, d_a, d_d))
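
The width arithmetic above gives any non-zero side at least 9% of the bar and, if the two clamped widths then overflow 100%, takes the excess back out of the larger side. A standalone sketch of just that calculation (illustration only, without the HTML generation):

    # Reproduces the bar-width arithmetic used above, nothing more.
    def bar_widths(added, deleted, width=100):
        unit = float(width) / ((added + deleted) or 1)
        a_p = max(9, unit * added) if added > 0 else 0
        d_p = max(9, unit * deleted) if deleted > 0 else 0
        if a_p + d_p > width:
            # shrink the bigger bar so both together fill exactly `width`
            if a_p > d_p:
                a_p -= (a_p + d_p) - width
            else:
                d_p -= (a_p + d_p) - width
        return a_p, d_p

    print(bar_widths(1, 999))   # roughly (9, 91): one added line still gets a visible sliver
    print(bar_widths(50, 50))   # (50.0, 50.0)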
 

	
 

	
 
def urlify_text(text_, safe=True):
    """
    Extract urls from text and make html links out of them

    :param text_:
    """

    def url_func(match_obj):
        url_full = match_obj.groups()[0]
        return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
    _newtext = url_re.sub(url_func, text_)
    if safe:
        return literal(_newtext)
    return _newtext
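
urlify_text relies on the module-level url_re pattern, which is defined earlier in this file and is not part of this hunk. A rough standalone equivalent, with a deliberately simplified pattern standing in for url_re (an assumption, not the real regex):

    import re

    # Simplified stand-in for the real url_re (assumption, for illustration only).
    url_re = re.compile(r'(https?://[^\s<>]+)')

    def urlify_text_demo(text_):
        def url_func(match_obj):
            url_full = match_obj.groups()[0]
            return '<a href="%(url)s">%(url)s</a>' % {'url': url_full}
        return url_re.sub(url_func, text_)

    print(urlify_text_demo('see https://example.com/x for details'))
    # see <a href="https://example.com/x">https://example.com/x</a> for details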
 

	
 

	
 
def urlify_changesets(text_, repository):
    """
    Extract revision ids from changeset and make link from them

    :param text_:
    :param repository: repo name to build the URL with
    """
    from pylons import url  # doh, we need to re-import url to mock it later

    def url_func(match_obj):
        rev = match_obj.group(0)
        return '<a class="revision-link" href="%(url)s">%(rev)s</a>' % {
         'url': url('changeset_home', repo_name=repository, revision=rev),
         'rev': rev,
        }

    return re.sub(r'(?:^|(?<=[\s(),]))([0-9a-fA-F]{12,40})(?=$|\s|[.,:()])', url_func, text_)
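
The substitution above only links bare 12-40 character hex strings that stand alone: preceded by start of text, whitespace, parentheses or a comma, and followed by end of text, whitespace or light punctuation. A standalone demonstration with a made-up fake_url() helper in place of the pylons url() call:

    import re

    HASH_RE = r'(?:^|(?<=[\s(),]))([0-9a-fA-F]{12,40})(?=$|\s|[.,:()])'

    def fake_url(rev):
        # stand-in for url('changeset_home', ...); purely illustrative
        return '/myrepo/changeset/%s' % rev

    def link_hashes(text_):
        return re.sub(HASH_RE,
                      lambda m: '<a href="%s">%s</a>' % (fake_url(m.group(0)), m.group(0)),
                      text_)

    print(link_hashes('fixed in deadbeefcafe1234, see notes'))
    # fixed in <a href="/myrepo/changeset/deadbeefcafe1234">deadbeefcafe1234</a>, see notes
    print(link_hashes('deadbeefcafe1234x stays untouched (no delimiter after the hash)'))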
 

	
 
def linkify_others(t, l):
    urls = re.compile(r'(\<a.*?\<\/a\>)',)
    links = []
    for e in urls.split(t):
        if not urls.match(e):
            links.append('<a class="message-link" href="%s">%s</a>' % (l, e))
        else:
            links.append(e)

    return ''.join(links)
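
linkify_others splits the message on the <a>...</a> anchors produced by the earlier passes and wraps only the remaining plain-text pieces in the fallback link, leaving existing anchors untouched. A small self-contained illustration (the hrefs are invented):

    import re

    def linkify_others_demo(t, l):
        # same splitting idea as above: keep existing anchors, wrap the rest
        urls = re.compile(r'(\<a.*?\<\/a\>)')
        links = []
        for e in urls.split(t):
            if not urls.match(e):
                links.append('<a class="message-link" href="%s">%s</a>' % (l, e))
            else:
                links.append(e)
        return ''.join(links)

    print(linkify_others_demo('fix bug <a href="/issue/7">#7</a> in parser',
                              '/changeset/abc'))
    # '<a class="message-link" href="/changeset/abc">fix bug </a>'
    # '<a href="/issue/7">#7</a>'
    # '<a class="message-link" href="/changeset/abc"> in parser</a>'  (printed as one string)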
 

	
 
def urlify_commit(text_, repository, link_=None):
    """
    Parses given text message and makes proper links.
    Issues are linked to the given issue server; the rest becomes a changeset
    link if link_ is given, otherwise it is left as plain text.

    :param text_:
    :param repository:
    :param link_: changeset link
    """
    def escaper(string):
        return string.replace('<', '&lt;').replace('>', '&gt;')

    # urlify changesets - extract revisions and make link out of them
    newtext = urlify_changesets(escaper(text_), repository)

    # extract http/https links and make them real urls
    newtext = urlify_text(newtext, safe=False)

    newtext = urlify_issues(newtext, repository, link_)

    return literal(newtext)

 def urlify_issues(newtext, repository, link_=None):
-    try:
-        import traceback
-        from kallithea import CONFIG
-        conf = CONFIG
+    from kallithea import CONFIG as conf

-        # allow multiple issue servers to be used
-        valid_indices = [
-            x.group(1)
-            for x in map(lambda x: re.match(r'issue_pat(.*)', x), conf.keys())
-            if x and 'issue_server_link%s' % x.group(1) in conf
-            and 'issue_prefix%s' % x.group(1) in conf
-        ]
+    # allow multiple issue servers to be used
+    valid_indices = [
+        x.group(1)
+        for x in map(lambda x: re.match(r'issue_pat(.*)', x), conf.keys())
+        if x and 'issue_server_link%s' % x.group(1) in conf
+        and 'issue_prefix%s' % x.group(1) in conf
+    ]

-        if valid_indices:
-            log.debug('found issue server suffixes `%s` during valuation of: %s'
-                      % (','.join(valid_indices), newtext))
+    if valid_indices:
+        log.debug('found issue server suffixes `%s` during valuation of: %s'
+                  % (','.join(valid_indices), newtext))

-            for pattern_index in valid_indices:
-                ISSUE_PATTERN = conf.get('issue_pat%s' % pattern_index)
-                ISSUE_SERVER_LNK = conf.get('issue_server_link%s' % pattern_index)
-                ISSUE_PREFIX = conf.get('issue_prefix%s' % pattern_index)
+    for pattern_index in valid_indices:
+        ISSUE_PATTERN = conf.get('issue_pat%s' % pattern_index)
+        ISSUE_SERVER_LNK = conf.get('issue_server_link%s' % pattern_index)
+        ISSUE_PREFIX = conf.get('issue_prefix%s' % pattern_index)

-                log.debug('pattern suffix `%s` PAT:%s SERVER_LINK:%s PREFIX:%s'
-                          % (pattern_index, ISSUE_PATTERN, ISSUE_SERVER_LNK,
-                             ISSUE_PREFIX))
+        log.debug('pattern suffix `%s` PAT:%s SERVER_LINK:%s PREFIX:%s'
+                  % (pattern_index, ISSUE_PATTERN, ISSUE_SERVER_LNK,
+                     ISSUE_PREFIX))

-                URL_PAT = re.compile(r'%s' % ISSUE_PATTERN)
+        URL_PAT = re.compile(r'%s' % ISSUE_PATTERN)

-                def url_func(match_obj):
-                    pref = ''
-                    if match_obj.group().startswith(' '):
-                        pref = ' '
+        def url_func(match_obj):
+            pref = ''
+            if match_obj.group().startswith(' '):
+                pref = ' '

-                    issue_id = ''.join(match_obj.groups())
-                    issue_url = ISSUE_SERVER_LNK.replace('{id}', issue_id)
-                    if repository:
-                        issue_url = issue_url.replace('{repo}', repository)
-                        repo_name = repository.split(URL_SEP)[-1]
-                        issue_url = issue_url.replace('{repo_name}', repo_name)
+            issue_id = ''.join(match_obj.groups())
+            issue_url = ISSUE_SERVER_LNK.replace('{id}', issue_id)
+            if repository:
+                issue_url = issue_url.replace('{repo}', repository)
+                repo_name = repository.split(URL_SEP)[-1]
+                issue_url = issue_url.replace('{repo_name}', repo_name)

-                    return (
-                        '%(pref)s<a class="%(cls)s" href="%(url)s">'
-                        '%(issue-prefix)s%(id-repr)s'
-                        '</a>'
-                        ) % {
-                         'pref': pref,
-                         'cls': 'issue-tracker-link',
-                         'url': issue_url,
-                         'id-repr': issue_id,
-                         'issue-prefix': ISSUE_PREFIX,
-                         'serv': ISSUE_SERVER_LNK,
-                        }
-                newtext = URL_PAT.sub(url_func, newtext)
-                log.debug('processed prefix:`%s` => %s' % (pattern_index, newtext))
+            return (
+                '%(pref)s<a class="%(cls)s" href="%(url)s">'
+                '%(issue-prefix)s%(id-repr)s'
+                '</a>'
+                ) % {
+                 'pref': pref,
+                 'cls': 'issue-tracker-link',
+                 'url': issue_url,
+                 'id-repr': issue_id,
+                 'issue-prefix': ISSUE_PREFIX,
+                 'serv': ISSUE_SERVER_LNK,
+                }
+        newtext = URL_PAT.sub(url_func, newtext)
+        log.debug('processed prefix:`%s` => %s' % (pattern_index, newtext))

-        # if we actually did something above
-        if link_:
-            # wrap not links into final link => link_
-            newtext = linkify_others(newtext, link_)
-    except Exception:
-        log.error(traceback.format_exc())
-        pass
+    # if we actually did something above
+    if link_:
+        # wrap not links into final link => link_
+        newtext = linkify_others(newtext, link_)
     return newtext
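
The valid_indices scan above pairs issue_pat<suffix>, issue_server_link<suffix> and issue_prefix<suffix> keys from the application config (an empty suffix is valid too). A toy configuration written as a plain dict, showing how the {id} and {repo} placeholders are filled in; the values are invented and the real CONFIG lookup is bypassed:

    import re

    # Invented example values; only the key names match the ones scanned above.
    conf = {
        'issue_pat': r'#(\d+)',
        'issue_server_link': 'https://issues.example.com/{repo}/issue/{id}',
        'issue_prefix': '#',
    }

    text = 'closes #42 and touches #7'
    url_tmpl = conf['issue_server_link'].replace('{repo}', 'group/myrepo')

    linked = re.sub(conf['issue_pat'],
                    lambda m: '<a href="%s">%s%s</a>' % (
                        url_tmpl.replace('{id}', m.group(1)),
                        conf['issue_prefix'], m.group(1)),
                    text)
    print(linked)
    # closes <a href="https://issues.example.com/group/myrepo/issue/42">#42</a> and touches ...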
 

	
 

	
 
def rst(source):
    return literal('<div class="rst-block">%s</div>' %
                   MarkupRenderer.rst(source))


def rst_w_mentions(source):
    """
    Wrapped rst renderer with @mention highlighting

    :param source:
    """
    return literal('<div class="rst-block">%s</div>' %
                   MarkupRenderer.rst_with_mentions(source))

def short_ref(ref_type, ref_name):
    if ref_type == 'rev':
        return short_id(ref_name)
    return ref_name

def link_to_ref(repo_name, ref_type, ref_name, rev=None):
    """
    Return full markup for a href to changeset_home for a changeset.
    If ref_type is branch it will link to changelog.
    ref_name is shortened if ref_type is 'rev'.
    if rev is specified show it too, explicitly linking to that revision.
    """
    txt = short_ref(ref_type, ref_name)
    if ref_type == 'branch':
        u = url('changelog_home', repo_name=repo_name, branch=ref_name)
    else:
        u = url('changeset_home', repo_name=repo_name, revision=ref_name)
    l = link_to(repo_name + '#' + txt, u)
    if rev and ref_type != 'rev':
        l = literal('%s (%s)' % (l, link_to(short_id(rev), url('changeset_home', repo_name=repo_name, revision=rev))))
    return l

def changeset_status(repo, revision):
    return ChangesetStatusModel().get_status(repo, revision)


def changeset_status_lbl(changeset_status):
    return dict(ChangesetStatus.STATUSES).get(changeset_status)


def get_permission_name(key):
    return dict(Permission.PERMS).get(key)


def journal_filter_help():
    return _(textwrap.dedent('''
        Example filter terms:
            repository:vcs
            username:developer
            action:*push*
            ip:127.0.0.1
            date:20120101
            date:[20120101100000 TO 20120102]

        Generate wildcards using '*' character:
            "repository:vcs*" - search everything starting with 'vcs'
            "repository:*vcs*" - search for repository containing 'vcs'

        Optional AND / OR operators in queries
            "repository:vcs OR repository:test"
            "username:test AND repository:test*"
    '''))


def not_mapped_error(repo_name):
    flash(_('%s repository is not mapped to db; perhaps'
            ' it was created or renamed from the filesystem.'
            ' Please run the application again'
            ' in order to rescan repositories') % repo_name, category='error')


def ip_range(ip_addr):
    from kallithea.model.db import UserIpMap
    s, e = UserIpMap._get_ip_range(ip_addr)
    return '%s - %s' % (s, e)
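
ip_range formats the first and last address of the range that UserIpMap._get_ip_range derives from an address or mask. A rough standalone illustration of the same idea using Python 3's ipaddress module (not Kallithea's actual implementation):

    import ipaddress

    def ip_range_demo(ip_addr):
        # accepts '127.0.0.1' or '192.168.1.0/24'; strict=False tolerates host bits in the mask
        net = ipaddress.ip_network(str(ip_addr), strict=False)
        return '%s - %s' % (net[0], net[-1])

    print(ip_range_demo('192.168.1.0/24'))  # 192.168.1.0 - 192.168.1.255
    print(ip_range_demo('127.0.0.1'))       # 127.0.0.1 - 127.0.0.1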

Changeset was too big and was cut off...

0 comments (0 inline, 0 general)