Changeset - cc1ab5ef6686
[Not reviewed]
kallithea/bin/kallithea_api.py
 
@@ -56,71 +56,71 @@ def argparser(argv):
 
    )
 
    group.add_argument('--format', dest='format', type=str,
 
            help='output format default: `%s` can '
 
                 'be also `%s`' % (FORMAT_PRETTY, FORMAT_JSON),
 
            default=FORMAT_PRETTY
 
    )
 
    args, other = parser.parse_known_args()
 
    return parser, args, other
 

	
 

	
 
def main(argv=None):
 
    """
 
    Main execution function for cli
 

	
 
    :param argv:
 
    """
 
    if argv is None:
 
        argv = sys.argv
 

	
 
    conf = None
 
    parser, args, other = argparser(argv)
 

	
 
    api_credentials_given = (args.apikey and args.apihost)
 
    if args.save_config:
 
        if not api_credentials_given:
 
            raise parser.error('--save-config requires --apikey and --apihost')
 
        conf = RcConf(config_location=args.config,
 
                      autocreate=True, config={'apikey': args.apikey,
 
                                               'apihost': args.apihost})
 
        sys.exit()
 

	
 
    if not conf:
 
        conf = RcConf(config_location=args.config, autoload=True)
 
        if not conf:
 
            if not api_credentials_given:
 
                parser.error('Could not find config file and missing '
 
                             '--apikey or --apihost in params')
 

	
 
    apikey = args.apikey or conf['apikey']
 
    apihost = args.apihost or conf['apihost']
 
    method = args.method
 

	
 
    # if we don't have method here it's an error
 
    if not method:
 
        parser.error('Please specify method name')
 

	
 
    try:
 
        margs = dict(map(lambda s: s.split(':', 1), other))
 
-    except Exception:
+    except ValueError:
 
        sys.stderr.write('Error parsing arguments \n')
 
        sys.exit()
 
    if args.format == FORMAT_PRETTY:
 
        print 'Calling method %s => %s' % (method, apihost)
 

	
 
    json_resp = api_call(apikey, apihost, method, **margs)
 
    error_prefix = ''
 
    if json_resp['error']:
 
        error_prefix = 'ERROR:'
 
        json_data = json_resp['error']
 
    else:
 
        json_data = json_resp['result']
 
    if args.format == FORMAT_JSON:
 
        print json.dumps(json_data)
 
    elif args.format == FORMAT_PRETTY:
 
        print 'Server response \n%s%s' % (
 
            error_prefix, json.dumps(json_data, indent=4, sort_keys=True)
 
        )
 
    return 0
 

	
 
if __name__ == '__main__':
 
    sys.exit(main(sys.argv))
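
The hunk above narrows the bare except Exception around the key:value argument parsing to except ValueError. A minimal sketch (not part of the changeset; parse_method_args is a made-up name) of why the narrower clause is enough: dict() raises ValueError when an element of the sequence does not unpack into exactly two items, which is exactly what happens when a token has no ':' separator.

    # Hypothetical helper illustrating the ValueError behaviour relied on above.
    def parse_method_args(tokens):
        try:
            return dict(s.split(':', 1) for s in tokens)
        except ValueError:
            return None

    assert parse_method_args(['repoid:1', 'clone_uri:http://x']) == \
        {'repoid': '1', 'clone_uri': 'http://x'}
    assert parse_method_args(['missing-separator']) is None
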
kallithea/bin/kallithea_config.py
 
@@ -107,56 +107,53 @@ def _run(argv):
 
        # parse arguments, we assume only first is correct
 
        kwargs = {}
 
        for el in _escape_split(other[0], ','):
 
            kv = _escape_split(el, '=')
 
            if len(kv) == 2:
 
                k, v = kv
 
                kwargs[k] = v
 
        # update our template stored args
 
        tmpl_stored_args.update(kwargs)
 

	
 
    # use default that cannot be replaced
 
    tmpl_stored_args.update({
 
        'uuid': lambda: uuid.uuid4().hex,
 
        'here': os.path.abspath(os.curdir),
 
    })
 
    if args.show_defaults:
 
        for k,v in tmpl_stored_args.iteritems():
 
            print '%s=%s' % (k, v)
 
        sys.exit(0)
 
    try:
 
        # built in template
 
        tmpl_file = os.path.join(here, TMPL)
 
        if args.template:
 
            tmpl_file = args.template
 

	
 
        with open(tmpl_file, 'rb') as f:
 
            tmpl_data = f.read()
 
            if args.raw:
 
                tmpl = tmpl_data
 
            else:
 
                tmpl = Template(tmpl_data).render(**tmpl_stored_args)
 
        with open(args.filename, 'wb') as f:
 
            f.write(tmpl)
 
        print 'Wrote new config file in %s' % (os.path.abspath(args.filename))
 

	
 
    except Exception:
 
        from mako import exceptions
 
        print exceptions.text_error_template().render()
 

	
 
def main(argv=None):
 
    """
 
    Main execution function for cli
 

	
 
    :param argv:
 
    """
 
    if argv is None:
 
        argv = sys.argv
 

	
 
    try:
 
        return _run(argv)
 
    except Exception:
 
        raise
 

	
 

	
 
if __name__ == '__main__':
 
    sys.exit(main(sys.argv))
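
For context on the kwargs loop above: extra command line arguments are parsed as a comma separated list of key=value pairs, which are then passed to the Mako template. A simplified sketch of that parsing, without the escape handling that _escape_split provides (parse_options is an illustrative name only):

    def parse_options(spec):
        kwargs = {}
        for el in spec.split(','):
            kv = el.split('=', 1)
            if len(kv) == 2:
                kwargs[kv[0]] = kv[1]
        return kwargs

    assert parse_options('host=127.0.0.1,port=5000') == {'host': '127.0.0.1',
                                                         'port': '5000'}
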
kallithea/controllers/admin/admin.py
 
@@ -86,67 +86,63 @@ def _journal_filter(user_log, search_ter
 
        else:
 
            field = getattr(UserLog, field)
 
        log.debug('filter field: %s val=>%s' % (field, val))
 

	
 
        #sql filtering
 
        if isinstance(term, query.Wildcard):
 
            return wildcard_handler(field, val)
 
        elif isinstance(term, query.Prefix):
 
            return func.lower(field).startswith(func.lower(val))
 
        elif isinstance(term, query.DateRange):
 
            return and_(field >= val[0], field <= val[1])
 
        return func.lower(field) == func.lower(val)
 

	
 
    if isinstance(qry, (query.And, query.Term, query.Prefix, query.Wildcard,
 
                        query.DateRange)):
 
        if not isinstance(qry, query.And):
 
            qry = [qry]
 
        for term in qry:
 
            field = term.fieldname
 
            val = (term.text if not isinstance(term, query.DateRange)
 
                   else [term.startdate, term.enddate])
 
            user_log = user_log.filter(get_filterion(field, val, term))
 
    elif isinstance(qry, query.Or):
 
        filters = []
 
        for term in qry:
 
            field = term.fieldname
 
            val = (term.text if not isinstance(term, query.DateRange)
 
                   else [term.startdate, term.enddate])
 
            filters.append(get_filterion(field, val, term))
 
        user_log = user_log.filter(or_(*filters))
 

	
 
    return user_log
 

	
 

	
 
class AdminController(BaseController):
 

	
 
    @LoginRequired()
 
    def __before__(self):
 
        super(AdminController, self).__before__()
 

	
 
    @HasPermissionAllDecorator('hg.admin')
 
    def index(self):
 
        users_log = UserLog.query()\
 
                .options(joinedload(UserLog.user))\
 
                .options(joinedload(UserLog.repository))
 

	
 
        #FILTERING
 
        c.search_term = request.GET.get('filter')
 
        try:
 
            users_log = _journal_filter(users_log, c.search_term)
 
        except Exception:
 
            # we want this to crash for now
 
            raise
 

	
 
        users_log = users_log.order_by(UserLog.action_date.desc())
 

	
 
        p = safe_int(request.GET.get('page', 1), 1)
 

	
 
        def url_generator(**kw):
 
            return url.current(filter=c.search_term, **kw)
 

	
 
        c.users_log = Page(users_log, page=p, items_per_page=10, url=url_generator)
 
        c.log_data = render('admin/admin_log.html')
 

	
 
        if request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            return c.log_data
 
        return render('admin/admin.html')
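
The page number above is read through safe_int so that a malformed 'page' query parameter falls back to 1 instead of raising. A stripped-down sketch of that pattern (the real helper lives in kallithea.lib.utils2 and may differ in detail):

    def safe_int(val, default=None):
        # coerce to int, swallowing only the failures int() itself raises
        try:
            return int(val)
        except (TypeError, ValueError):
            return default

    assert safe_int('3', 1) == 3
    assert safe_int('abc', 1) == 1
    assert safe_int(None, 1) == 1
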
kallithea/controllers/changeset.py
 
@@ -30,100 +30,97 @@ import logging
 
import traceback
 
from collections import defaultdict
 
from webob.exc import HTTPForbidden, HTTPBadRequest, HTTPNotFound
 

	
 
from pylons import tmpl_context as c, request, response
 
from pylons.i18n.translation import _
 
from pylons.controllers.util import redirect
 
from kallithea.lib.utils import jsonify
 

	
 
from kallithea.lib.vcs.exceptions import RepositoryError, \
 
    ChangesetDoesNotExistError
 

	
 
from kallithea.lib.compat import json
 
import kallithea.lib.helpers as h
 
from kallithea.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator,\
 
    NotAnonymous
 
from kallithea.lib.base import BaseRepoController, render
 
from kallithea.lib.utils import action_logger
 
from kallithea.lib.compat import OrderedDict
 
from kallithea.lib import diffs
 
from kallithea.model.db import ChangesetComment, ChangesetStatus
 
from kallithea.model.comment import ChangesetCommentsModel
 
from kallithea.model.changeset_status import ChangesetStatusModel
 
from kallithea.model.meta import Session
 
from kallithea.model.repo import RepoModel
 
from kallithea.lib.diffs import LimitedDiffContainer
 
from kallithea.lib.exceptions import StatusChangeOnClosedPullRequestError
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 
from kallithea.lib.utils2 import safe_unicode
 
from kallithea.lib.graphmod import graph_data
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
def _update_with_GET(params, GET):
 
    for k in ['diff1', 'diff2', 'diff']:
 
        params[k] += GET.getall(k)
 

	
 

	
 
def anchor_url(revision, path, GET):
 
    fid = h.FID(revision, path)
 
    return h.url.current(anchor=fid, **dict(GET))
 

	
 

	
 
def get_ignore_ws(fid, GET):
 
    ig_ws_global = GET.get('ignorews')
 
    ig_ws = filter(lambda k: k.startswith('WS'), GET.getall(fid))
 
    if ig_ws:
 
        try:
 
            return int(ig_ws[0].split(':')[-1])
 
        except Exception:
 
            pass
 
    return ig_ws_global
 

	
 

	
 
def _ignorews_url(GET, fileid=None):
 
    fileid = str(fileid) if fileid else None
 
    params = defaultdict(list)
 
    _update_with_GET(params, GET)
 
    lbl = _('Show whitespace')
 
    ig_ws = get_ignore_ws(fileid, GET)
 
    ln_ctx = get_line_ctx(fileid, GET)
 
    # global option
 
    if fileid is None:
 
        if ig_ws is None:
 
            params['ignorews'] += [1]
 
            lbl = _('Ignore whitespace')
 
        ctx_key = 'context'
 
        ctx_val = ln_ctx
 
    # per file options
 
    else:
 
        if ig_ws is None:
 
            params[fileid] += ['WS:1']
 
            lbl = _('Ignore whitespace')
 

	
 
        ctx_key = fileid
 
        ctx_val = 'C:%s' % ln_ctx
 
    # if we have passed in ln_ctx pass it along to our params
 
    if ln_ctx:
 
        params[ctx_key] += [ctx_val]
 

	
 
    params['anchor'] = fileid
 
    icon = h.literal('<i class="icon-strike"></i>')
 
    return h.link_to(icon, h.url.current(**params), title=lbl, class_='tooltip')
 

	
 

	
 
def get_line_ctx(fid, GET):
 
    ln_ctx_global = GET.get('context')
 
    if fid:
 
        ln_ctx = filter(lambda k: k.startswith('C'), GET.getall(fid))
 
    else:
 
        _ln_ctx = filter(lambda k: k.startswith('C'), GET)
 
        ln_ctx = GET.get(_ln_ctx[0]) if _ln_ctx  else ln_ctx_global
 
        if ln_ctx:
 
            ln_ctx = [ln_ctx]
 

	
 
    if ln_ctx:
 
        retval = ln_ctx[0].split(':')[-1]
 
    else:
 
        retval = ln_ctx_global
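
The per-file query parameters handled above are encoded as small 'PREFIX:value' tokens (WS:1 for whitespace, C:<n> for context lines), and both get_ignore_ws and get_line_ctx recover the value from the part after the last colon. A reduced sketch of that decoding (decode_flag is an illustrative name only):

    def decode_flag(values, prefix):
        flagged = [v for v in values if v.startswith(prefix)]
        if flagged:
            try:
                return int(flagged[0].split(':')[-1])
            except ValueError:
                pass
        return None

    assert decode_flag(['WS:1', 'C:5'], 'WS') == 1
    assert decode_flag(['C:5'], 'WS') is None
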
kallithea/controllers/error.py
 
@@ -44,63 +44,63 @@ class ErrorController(BaseController):
 

	
 
    The ErrorDocuments middleware forwards to ErrorController when error
 
    related status codes are returned from the application.
 

	
 
    This behavior can be altered by changing the parameters to the
 
    ErrorDocuments middleware in your config/middleware.py file.
 
    """
 

	
 
    def __before__(self):
 
        #disable all base actions since we don't need them here
 
        pass
 

	
 
    def document(self):
 
        resp = request.environ.get('pylons.original_response')
 
        c.site_name = config.get('title')
 

	
 
        log.debug('### %s ###' % resp.status)
 

	
 
        e = request.environ
 
        c.serv_p = r'%(protocol)s://%(host)s/' \
 
                                    % {'protocol': e.get('wsgi.url_scheme'),
 
                                       'host': e.get('HTTP_HOST'), }
 

	
 
        c.error_message = cgi.escape(request.GET.get('code', str(resp.status)))
 
        c.error_explanation = self.get_error_explanation(resp.status_int)
 

	
 
        return render('/errors/error_document.html')
 

	
 
    def img(self, id):
 
        """Serve Pylons' stock images"""
 
        return self._serve_file(os.path.join(media_path, 'img', id))
 

	
 
    def style(self, id):
 
        """Serve Pylons' stock stylesheets"""
 
        return self._serve_file(os.path.join(media_path, 'style', id))
 

	
 
    def _serve_file(self, path):
 
        """Call Paste's FileApp (a WSGI application) to serve the file
 
        at the specified path
 
        """
 
        fapp = paste.fileapp.FileApp(path)
 
        return fapp(request.environ, self.start_response)
 

	
 
    def get_error_explanation(self, code):
 
        """ get the error explanations of int codes
 
            [400, 401, 403, 404, 500]"""
 
        try:
 
            code = int(code)
 
-        except Exception:
+        except ValueError:
 
            code = 500
 

	
 
        if code == 400:
 
            return _('The request could not be understood by the server'
 
                     ' due to malformed syntax.')
 
        if code == 401:
 
            return _('Unauthorized access to resource')
 
        if code == 403:
 
            return _("You don't have permission to view this page")
 
        if code == 404:
 
            return _('The resource could not be found')
 
        if code == 500:
 
            return _('The server encountered an unexpected condition'
 
                     ' which prevented it from fulfilling the request.')
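
get_error_explanation above guards int(code) with the narrower except ValueError and then maps a handful of status codes to messages. A compact sketch of the same lookup using a dict instead of the if chain (message texts abbreviated, not the translated strings used by Kallithea):

    EXPLANATIONS = {
        400: 'malformed syntax',
        401: 'unauthorized access to resource',
        403: 'no permission to view this page',
        404: 'resource could not be found',
        500: 'unexpected server condition',
    }

    def explain(code):
        try:
            code = int(code)
        except ValueError:
            code = 500
        return EXPLANATIONS.get(code, EXPLANATIONS[500])

    assert explain('404') == 'resource could not be found'
    assert explain('bogus') == 'unexpected server condition'
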
kallithea/controllers/journal.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.controllers.journal
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Journal controller for pylons
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Nov 21, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 

	
 
"""
 

	
 
import logging
 
import traceback
 
from itertools import groupby
 

	
 
from sqlalchemy import or_
 
from sqlalchemy.orm import joinedload
 
from sqlalchemy.sql.expression import func
 

	
 
from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed
 

	
 
from webob.exc import HTTPBadRequest
 
from pylons import request, tmpl_context as c, response, url
 
from pylons.i18n.translation import _
 

	
 
from kallithea.controllers.admin.admin import _journal_filter
 
from kallithea.model.db import UserLog, UserFollowing, Repository, User
 
from kallithea.model.meta import Session
 
from kallithea.model.repo import RepoModel
 
import kallithea.lib.helpers as h
 
from kallithea.lib.helpers import Page
 
from kallithea.lib.auth import LoginRequired, NotAnonymous
 
from kallithea.lib.base import BaseController, render
 
from kallithea.lib.utils2 import safe_int, AttributeDict
 
from kallithea.lib.compat import json
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class JournalController(BaseController):
 

	
 
    def __before__(self):
 
        super(JournalController, self).__before__()
 
        self.language = 'en-us'
 
        self.ttl = "5"
 
        self.feed_nr = 20
 
        c.search_term = request.GET.get('filter')
 

	
 
    def _get_daily_aggregate(self, journal):
 
        groups = []
 
        for k, g in groupby(journal, lambda x: x.action_as_day):
 
            user_group = []
 
            #groupby username if it's a present value, else fallback to journal username
 
            for _unused, g2 in groupby(list(g), lambda x: x.user.username if x.user else x.username):
 
                l = list(g2)
 
                user_group.append((l[0].user, l))
 

	
 
            groups.append((k, user_group,))
 

	
 
        return groups
 

	
 
    def _get_journal_data(self, following_repos):
 
        repo_ids = [x.follows_repository.repo_id for x in following_repos
 
                    if x.follows_repository is not None]
 
        user_ids = [x.follows_user.user_id for x in following_repos
 
                    if x.follows_user is not None]
 

	
 
        filtering_criterion = None
 

	
 
        if repo_ids and user_ids:
 
            filtering_criterion = or_(UserLog.repository_id.in_(repo_ids),
 
                        UserLog.user_id.in_(user_ids))
 
        if repo_ids and not user_ids:
 
            filtering_criterion = UserLog.repository_id.in_(repo_ids)
 
        if not repo_ids and user_ids:
 
            filtering_criterion = UserLog.user_id.in_(user_ids)
 
        if filtering_criterion is not None:
 
            journal = self.sa.query(UserLog)\
 
                .options(joinedload(UserLog.user))\
 
                .options(joinedload(UserLog.repository))
 
            #filter
 
            try:
 
                journal = _journal_filter(journal, c.search_term)
 
            except Exception:
 
                # we want this to crash for now
 
                raise
 
            journal = journal.filter(filtering_criterion)\
 
                        .order_by(UserLog.action_date.desc())
 
        else:
 
            journal = []
 

	
 
        return journal
 

	
 
    def _atom_feed(self, repos, public=True):
 
        journal = self._get_journal_data(repos)
 
        if public:
 
            _link = h.canonical_url('public_journal_atom')
 
            _desc = '%s %s %s' % (c.site_name, _('public journal'),
 
                                  'atom feed')
 
        else:
 
            _link = h.canonical_url('journal_atom')
 
            _desc = '%s %s %s' % (c.site_name, _('journal'), 'atom feed')
 

	
 
        feed = Atom1Feed(title=_desc,
 
                         link=_link,
 
                         description=_desc,
 
                         language=self.language,
 
                         ttl=self.ttl)
 

	
 
        for entry in journal[:self.feed_nr]:
 
            user = entry.user
 
            if user is None:
 
                #fix deleted users
 
                user = AttributeDict({'short_contact': entry.username,
 
                                      'email': '',
 
                                      'full_contact': ''})
 
            action, action_extra, ico = h.action_parser(entry, feed=True)
 
            title = "%s - %s %s" % (user.short_contact, action(),
 
                                    entry.repository.repo_name)
 
            desc = action_extra()
 
            _url = None
 
            if entry.repository is not None:
 
                _url = h.canonical_url('changelog_home',
 
                           repo_name=entry.repository.repo_name)
 

	
 
            feed.add_item(title=title,
 
                          pubdate=entry.action_date,
 
                          link=_url or h.canonical_url(''),
 
                          author_email=user.email,
 
                          author_name=user.full_contact,
 
                          description=desc)
 

	
 
        response.content_type = feed.mime_type
 
        return feed.writeString('utf-8')
 
@@ -275,106 +272,108 @@ class JournalController(BaseController):
 
        c.watched_data = json.dumps({
 
            "totalRecords": len(c.following),
 
            "startIndex": 0,
 
            "sort": "name",
 
            "dir": "asc",
 
            "records": watched_repos_data
 
        })
 
        return render('journal/journal.html')
 

	
 
    @LoginRequired(api_access=True)
 
    @NotAnonymous()
 
    def journal_atom(self):
 
        """
 
        Produce an atom-1.0 feed via feedgenerator module
 
        """
 
        following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 
        return self._atom_feed(following, public=False)
 

	
 
    @LoginRequired(api_access=True)
 
    @NotAnonymous()
 
    def journal_rss(self):
 
        """
 
        Produce an rss feed via feedgenerator module
 
        """
 
        following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 
        return self._rss_feed(following, public=False)
 

	
 
    @LoginRequired()
 
    @NotAnonymous()
 
    def toggle_following(self):
 
        cur_token = request.POST.get('auth_token')
 
        token = h.get_token()
 
        if cur_token == token:
 

	
 
            user_id = request.POST.get('follows_user_id')
 
            if user_id:
 
                try:
 
                    self.scm_model.toggle_following_user(user_id,
 
                                                self.authuser.user_id)
 
                    Session.commit()
 
                    return 'ok'
 
                except Exception:
 
                    log.error(traceback.format_exc())
 
                    raise HTTPBadRequest()
 

	
 
            repo_id = request.POST.get('follows_repo_id')
 
            if repo_id:
 
                try:
 
                    self.scm_model.toggle_following_repo(repo_id,
 
                                                self.authuser.user_id)
 
                    Session.commit()
 
                    return 'ok'
 
                except Exception:
 
                    log.error(traceback.format_exc())
 
                    raise HTTPBadRequest()
 

	
 
        log.debug('token mismatch %s vs %s' % (cur_token, token))
 
        raise HTTPBadRequest()
 

	
 
    @LoginRequired()
 
    def public_journal(self):
 
        # Return a rendered template
 
        p = safe_int(request.GET.get('page', 1), 1)
 

	
 
        c.following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 

	
 
        journal = self._get_journal_data(c.following)
 

	
 
        c.journal_pager = Page(journal, page=p, items_per_page=20)
 

	
 
        c.journal_day_aggreagate = self._get_daily_aggregate(c.journal_pager)
 

	
 
        c.journal_data = render('journal/journal_data.html')
 
        if request.environ.get('HTTP_X_PARTIAL_XHR'):
 
            return c.journal_data
 
        return render('journal/public_journal.html')
 

	
 
    @LoginRequired(api_access=True)
 
    def public_journal_atom(self):
 
        """
 
        Produce an atom-1.0 feed via feedgenerator module
 
        """
 
        c.following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
 

	
 
        return self._atom_feed(c.following)
 

	
 
    @LoginRequired(api_access=True)
 
    def public_journal_rss(self):
 
        """
 
        Produce an rss2 feed via feedgenerator module
 
        """
 
        c.following = self.sa.query(UserFollowing)\
 
            .filter(UserFollowing.user_id == self.authuser.user_id)\
 
            .options(joinedload(UserFollowing.follows_repository))\
 
            .all()
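
_get_daily_aggregate above relies on itertools.groupby, which only merges consecutive items; that works because the journal query is already ordered by action_date. A small self-contained illustration of the same two-level grouping over (day, user) tuples:

    from itertools import groupby

    entries = [('2014-07-01', 'alice'), ('2014-07-01', 'alice'),
               ('2014-07-01', 'bob'), ('2014-07-02', 'alice')]

    by_day = [(day, [(user, len(list(rows)))
                     for user, rows in groupby(items, key=lambda e: e[1])])
              for day, items in groupby(entries, key=lambda e: e[0])]

    assert by_day == [('2014-07-01', [('alice', 2), ('bob', 1)]),
                      ('2014-07-02', [('alice', 1)])]
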
 

	
kallithea/controllers/summary.py
 
@@ -73,98 +73,96 @@ class SummaryController(BaseRepoControll
 

	
 
        branches_group = ([], _("Branches"))
 
        tags_group = ([], _("Tags"))
 

	
 
        for name, chs in c.db_repo_scm_instance.branches.items():
 
            #chs = chs.split(':')[-1]
 
            branches_group[0].append((chs, name),)
 
        download_l.append(branches_group)
 

	
 
        for name, chs in c.db_repo_scm_instance.tags.items():
 
            #chs = chs.split(':')[-1]
 
            tags_group[0].append((chs, name),)
 
        download_l.append(tags_group)
 

	
 
        return download_l
 

	
 
    def __get_readme_data(self, db_repo):
 
        repo_name = db_repo.repo_name
 
        log.debug('Looking for README file')
 

	
 
        @cache_region('long_term')
 
        def _get_readme_from_cache(key, kind):
 
            readme_data = None
 
            readme_file = None
 
            try:
 
                # gets the landing revision! or tip if fails
 
                cs = db_repo.get_landing_changeset()
 
                if isinstance(cs, EmptyChangeset):
 
                    raise EmptyRepositoryError()
 
                renderer = MarkupRenderer()
 
                for f in README_FILES:
 
                    try:
 
                        readme = cs.get_node(f)
 
                        if not isinstance(readme, FileNode):
 
                            continue
 
                        readme_file = f
 
                        log.debug('Found README file `%s` rendering...' %
 
                                  readme_file)
 
                        readme_data = renderer.render(readme.content,
 
                                                      filename=f)
 
                        break
 
                    except NodeDoesNotExistError:
 
                        continue
 
            except ChangesetError:
 
                log.error(traceback.format_exc())
 
                pass
 
            except EmptyRepositoryError:
 
                pass
 
            except Exception:
 
                log.error(traceback.format_exc())
 

	
 
            return readme_data, readme_file
 

	
 
        kind = 'README'
 
        valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
 
        if not valid:
 
            region_invalidate(_get_readme_from_cache, None, repo_name, kind)
 
        return _get_readme_from_cache(repo_name, kind)
 

	
 
    @LoginRequired()
 
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
 
                                   'repository.admin')
 
    def index(self, repo_name):
 
        _load_changelog_summary()
 

	
 
        username = ''
 
        if self.authuser.username != User.DEFAULT_USER:
 
            username = safe_str(self.authuser.username)
 

	
 
        _def_clone_uri = _def_clone_uri_by_id = c.clone_uri_tmpl
 
        if '{repo}' in _def_clone_uri:
 
            _def_clone_uri_by_id = _def_clone_uri.replace('{repo}', '_{repoid}')
 
        elif '{repoid}' in _def_clone_uri:
 
            _def_clone_uri_by_id = _def_clone_uri.replace('_{repoid}', '{repo}')
 

	
 
        c.clone_repo_url = c.db_repo.clone_url(user=username,
 
                                                uri_tmpl=_def_clone_uri)
 
        c.clone_repo_url_id = c.db_repo.clone_url(user=username,
 
                                                uri_tmpl=_def_clone_uri_by_id)
 

	
 
        if c.db_repo.enable_statistics:
 
            c.show_stats = True
 
        else:
 
            c.show_stats = False
 

	
 
        stats = self.sa.query(Statistics)\
 
            .filter(Statistics.repository == c.db_repo)\
 
            .scalar()
 

	
 
        c.stats_percentage = 0
 

	
 
        if stats and stats.languages:
 
            c.no_data = False is c.db_repo.enable_statistics
 
            lang_stats_d = json.loads(stats.languages)
 

	
 
            lang_stats = ((x, {"count": y,
 
                               "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
 
                          for x, y in lang_stats_d.items())
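
__get_readme_data above walks a preference-ordered list of candidate README names and renders the first node that exists in the landing changeset. The names and ordering below are illustrative only, not Kallithea's actual README_FILES list:

    CANDIDATE_README_FILES = ['README.rst', 'README.md', 'README.txt', 'README']

    def find_readme(available_files):
        # return the first candidate present, mirroring the loop above
        for name in CANDIDATE_README_FILES:
            if name in available_files:
                return name
        return None

    assert find_readme({'setup.py', 'README.md'}) == 'README.md'
    assert find_readme({'setup.py'}) is None
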
kallithea/lib/auth.py
 
@@ -666,98 +666,96 @@ class AuthUser(object):
 
        _set = set()
 

	
 
        if inherit_from_default:
 
            default_ips = UserIpMap.query().filter(UserIpMap.user ==
 
                                            User.get_default_user(cache=True))
 
            if cache:
 
                default_ips = default_ips.options(FromCache("sql_cache_short",
 
                                                  "get_user_ips_default"))
 

	
 
            # populate from default user
 
            for ip in default_ips:
 
                try:
 
                    _set.add(ip.ip_addr)
 
                except ObjectDeletedError:
 
                    # since we use heavy caching sometimes it happens that we get
 
                    # deleted objects here, we just skip them
 
                    pass
 

	
 
        user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
 
        if cache:
 
            user_ips = user_ips.options(FromCache("sql_cache_short",
 
                                                  "get_user_ips_%s" % user_id))
 

	
 
        for ip in user_ips:
 
            try:
 
                _set.add(ip.ip_addr)
 
            except ObjectDeletedError:
 
                # since we use heavy caching sometimes it happens that we get
 
                # deleted objects here, we just skip them
 
                pass
 
        return _set or set(['0.0.0.0/0', '::/0'])
 

	
 

	
 
def set_available_permissions(config):
 
    """
 
    This function will propagate pylons globals with all available defined
 
    permission given in db. We don't want to check each time from db for new
 
    permissions since adding a new permission also requires application restart
 
    ie. to decorate new views with the newly created permission
 

	
 
    :param config: current pylons config instance
 

	
 
    """
 
    log.info('getting information about all available permissions')
 
    try:
 
        sa = meta.Session
 
        all_perms = sa.query(Permission).all()
 
        config['available_permissions'] = [x.permission_name for x in all_perms]
 
    except Exception:
 
        log.error(traceback.format_exc())
 
    finally:
 
        meta.Session.remove()
 

	
 

	
 
#==============================================================================
 
# CHECK DECORATORS
 
#==============================================================================
 
class LoginRequired(object):
 
    """
 
    Must be logged in to execute this function else
 
    redirect to login page
 

	
 
    :param api_access: if enabled this checks only for valid auth token
 
        and grants access based on valid token
 
    """
 

	
 
    def __init__(self, api_access=False):
 
        self.api_access = api_access
 

	
 
    def __call__(self, func):
 
        return decorator(self.__wrapper, func)
 

	
 
    def __wrapper(self, func, *fargs, **fkwargs):
 
        cls = fargs[0]
 
        user = cls.authuser
 
        loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
 

	
 
        # check if our IP is allowed
 
        ip_access_valid = True
 
        if not user.ip_allowed:
 
            from kallithea.lib import helpers as h
 
            h.flash(h.literal(_('IP %s not allowed' % (user.ip_addr))),
 
                    category='warning')
 
            ip_access_valid = False
 

	
 
        # check if we used an APIKEY and it's a valid one
 
        # defined whitelist of controllers which API access will be enabled
 
        _api_key = request.GET.get('api_key', '')
 
        api_access_valid = allowed_api_access(loc, api_key=_api_key)
 

	
 
        # explicit controller is enabled or API is in our whitelist
 
        if self.api_access or api_access_valid:
 
            log.debug('Checking API KEY access for %s' % cls)
 
            if _api_key and _api_key in user.api_keys:
 
                api_access_valid = True
 
                log.debug('API KEY ****%s is VALID' % _api_key[-4:])
 
            else:
 
                api_access_valid = False
 
@@ -1130,102 +1128,97 @@ class HasRepoGroupPermissionAll(PermsFun
 

	
 

	
 
class HasUserGroupPermissionAny(PermsFunction):
 
    def __call__(self, user_group_name=None, check_location='', user=None):
 
        self.user_group_name = user_group_name
 
        return super(HasUserGroupPermissionAny, self).__call__(check_location, user)
 

	
 
    def check_permissions(self):
 
        try:
 
            self._user_perms = set(
 
                [self.user_perms['user_groups'][self.user_group_name]]
 
            )
 
        except KeyError:
 
            return False
 
        if self.required_perms.intersection(self._user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasUserGroupPermissionAll(PermsFunction):
 
    def __call__(self, user_group_name=None, check_location='', user=None):
 
        self.user_group_name = user_group_name
 
        return super(HasUserGroupPermissionAll, self).__call__(check_location, user)
 

	
 
    def check_permissions(self):
 
        try:
 
            self._user_perms = set(
 
                [self.user_perms['user_groups'][self.user_group_name]]
 
            )
 
        except KeyError:
 
            return False
 
        if self.required_perms.issubset(self._user_perms):
 
            return True
 
        return False
 

	
 

	
 
#==============================================================================
 
# SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
 
#==============================================================================
 
class HasPermissionAnyMiddleware(object):
 
    def __init__(self, *perms):
 
        self.required_perms = set(perms)
 

	
 
    def __call__(self, user, repo_name):
 
        # repo_name MUST be unicode, since we handle keys in permission
 
        # dict by unicode
 
        repo_name = safe_unicode(repo_name)
 
        usr = AuthUser(user.user_id)
 
        try:
 
            self.user_perms = set([usr.permissions['repositories'][repo_name]])
 
        except Exception:
 
            log.error('Exception while accessing permissions %s' %
 
                      traceback.format_exc())
 
            self.user_perms = set()
 
        self.username = user.username
 
        self.repo_name = repo_name
 
        return self.check_permissions()
 

	
 
    def check_permissions(self):
 
        log.debug('checking VCS protocol '
 
                  'permissions %s for user:%s repository:%s', self.user_perms,
 
                                                self.username, self.repo_name)
 
        if self.required_perms.intersection(self.user_perms):
 
            log.debug('Permission to repo: %s granted for user: %s @ %s'
 
                      % (self.repo_name, self.username, 'PermissionMiddleware'))
 
            return True
 
        log.debug('Permission to repo: %s denied for user: %s @ %s'
 
                  % (self.repo_name, self.username, 'PermissionMiddleware'))
 
        return False
 

	
 

	
 
#==============================================================================
 
# SPECIAL VERSION TO HANDLE API AUTH
 
#==============================================================================
 
class _BaseApiPerm(object):
 
    def __init__(self, *perms):
 
        self.required_perms = set(perms)
 

	
 
    def __call__(self, check_location=None, user=None, repo_name=None,
 
                 group_name=None):
 
        cls_name = self.__class__.__name__
 
        check_scope = 'user:%s' % (user)
 
        if repo_name:
 
            check_scope += ', repo:%s' % (repo_name)
 

	
 
        if group_name:
 
            check_scope += ', repo group:%s' % (group_name)
 

	
 
        log.debug('checking cls:%s %s %s @ %s'
 
                  % (cls_name, self.required_perms, check_scope, check_location))
 
        if not user:
 
            log.debug('Empty User passed into arguments')
 
            return False
 

	
 
        ## process user
 
        if not isinstance(user, AuthUser):
 
            user = AuthUser(user.user_id)
 
        if not check_location:
 
            check_location = 'unspecified'
 
        if self.check_permissions(user.permissions, repo_name, group_name):
 
            log.debug('Permission to %s granted for user: %s @ %s'
 
                      % (check_scope, user, check_location))
 
@@ -1273,60 +1266,53 @@ class HasRepoPermissionAllApi(_BaseApiPe
 
        return False
 

	
 

	
 
class HasRepoPermissionAnyApi(_BaseApiPerm):
 
    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
 
        try:
 
            _user_perms = set([perm_defs['repositories'][repo_name]])
 
        except KeyError:
 
            log.warning(traceback.format_exc())
 
            return False
 
        if self.required_perms.intersection(_user_perms):
 
            return True
 
        return False
 

	
 

	
 
class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
 
    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
 
        try:
 
            _user_perms = set([perm_defs['repositories_groups'][group_name]])
 
        except KeyError:
 
            log.warning(traceback.format_exc())
 
            return False
 
        if self.required_perms.intersection(_user_perms):
 
            return True
 
        return False
 

	
 
class HasRepoGroupPermissionAllApi(_BaseApiPerm):
 
    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
 
        try:
 
            _user_perms = set([perm_defs['repositories_groups'][group_name]])
 
        except KeyError:
 
            log.warning(traceback.format_exc())
 
            return False
 
        if self.required_perms.issubset(_user_perms):
 
            return True
 
        return False
 

	
 
def check_ip_access(source_ip, allowed_ips=None):
 
    """
 
    Checks if source_ip is a subnet of any of allowed_ips.
 

	
 
    :param source_ip:
 
    :param allowed_ips: list of allowed ips together with mask
 
    """
 
    from kallithea.lib import ipaddr
 
    log.debug('checking if ip:%s is subnet of %s' % (source_ip, allowed_ips))
 
    if isinstance(allowed_ips, (tuple, list, set)):
 
        for ip in allowed_ips:
 
            try:
 
                if ipaddr.IPAddress(source_ip) in ipaddr.IPNetwork(ip):
 
                    log.debug('IP %s is network %s' %
 
                              (ipaddr.IPAddress(source_ip), ipaddr.IPNetwork(ip)))
 
                    return True
 
                # for any case we cannot determine the IP, don't crash just
 
                # skip it and log as error, we want to say forbidden still when
 
                # sending bad IP
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                continue
 
    return False
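
check_ip_access above tests whether the source address falls inside any of the allowed networks, using the bundled kallithea.lib.ipaddr module. An equivalent sketch written against the stdlib ipaddress module (Python 3, or the ipaddress backport) rather than that bundled library:

    import ipaddress

    def ip_allowed(source_ip, allowed_networks):
        source = ipaddress.ip_address(u'%s' % source_ip)
        for net in allowed_networks:
            if source in ipaddress.ip_network(u'%s' % net, strict=False):
                return True
        return False

    assert ip_allowed('192.168.1.7', ['10.0.0.0/8', '192.168.1.0/24'])
    assert not ip_allowed('8.8.8.8', ['192.168.1.0/24'])
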
kallithea/lib/auth_modules/__init__.py
 
@@ -248,102 +248,98 @@ class KallitheaAuthPluginBase(object):
 
class KallitheaExternalAuthPlugin(KallitheaAuthPluginBase):
 
    def use_fake_password(self):
 
        """
 
        Return a boolean that indicates whether or not we should set the user's
 
        password to a random value when it is authenticated by this plugin.
 
        If your plugin provides authentication, then you will generally want this.
 

	
 
        :returns: boolean
 
        """
 
        raise NotImplementedError("Not implemented in base class")
 

	
 
    def _authenticate(self, userobj, username, passwd, settings, **kwargs):
 
        auth = super(KallitheaExternalAuthPlugin, self)._authenticate(
 
            userobj, username, passwd, settings, **kwargs)
 
        if auth:
 
            # maybe plugin will clean the username ?
 
            # we should use the return value
 
            username = auth['username']
 
            # if user is not active from our extern type we should fail to authenticate
 
            # this can prevent from creating users in Kallithea when using
 
            # external authentication, but if it's inactive user we shouldn't
 
            # create that user anyway
 
            if auth['active_from_extern'] is False:
 
                log.warning("User %s authenticated against %s, but is inactive"
 
                            % (username, self.__module__))
 
                return None
 

	
 
            if self.use_fake_password():
 
                # Randomize the PW because we don't need it, but don't want
 
                # them blank either
 
                passwd = PasswordGenerator().gen_password(length=8)
 

	
 
            log.debug('Updating or creating user info from %s plugin'
 
                      % self.name)
 
            user = UserModel().create_or_update(
 
                username=username,
 
                password=passwd,
 
                email=auth["email"],
 
                firstname=auth["firstname"],
 
                lastname=auth["lastname"],
 
                active=auth["active"],
 
                admin=auth["admin"],
 
                extern_name=auth["extern_name"],
 
                extern_type=self.name
 
            )
 
            Session().flush()
 
            # enforce user is just in given groups, all of them has to be ones
 
            # created from plugins. We store this info in _group_data JSON field
 
            try:
 
                groups = auth['groups'] or []
 
                UserGroupModel().enforce_groups(user, groups, self.name)
 
            except Exception:
 
                # for any reason group syncing fails, we should proceed with login
 
                log.error(traceback.format_exc())
 
            Session().commit()
 
        return auth
 

	
 

	
 
def importplugin(plugin):
 
    """
 
    Imports and returns the authentication plugin in the module named by plugin
 
    (e.g., plugin='kallithea.lib.auth_modules.auth_internal'). Returns the
 
    KallitheaAuthPluginBase subclass on success, raises exceptions on failure.
 

	
 
    raises:
 
        AttributeError -- no KallitheaAuthPlugin class in the module
 
        TypeError -- if the KallitheaAuthPlugin is not a subclass of our KallitheaAuthPluginBase
 
        ImportError -- if we couldn't import the plugin at all
 
    """
 
    log.debug("Importing %s" % plugin)
 
    if not plugin.startswith(u'kallithea.lib.auth_modules.auth_'):
 
        parts = plugin.split(u'.lib.auth_modules.auth_', 1)
 
        if len(parts) == 2:
 
            _module, pn = parts
 
            if pn == EXTERN_TYPE_INTERNAL:
 
                pn = "internal"
 
            plugin = u'kallithea.lib.auth_modules.auth_' + pn
 
    PLUGIN_CLASS_NAME = "KallitheaAuthPlugin"
 
    try:
 
        module = importlib.import_module(plugin)
 
    except (ImportError, TypeError):
 
        log.error(traceback.format_exc())
 
        # TODO: make this more robust: if by some accident we screw up
        # the plugin name, the crash is pretty bad and hard to recover
 
        raise
 

	
 
    log.debug("Loaded auth plugin from %s (module:%s, file:%s)"
 
              % (plugin, module.__name__, module.__file__))
 

	
 
    pluginclass = getattr(module, PLUGIN_CLASS_NAME)
 
    if not issubclass(pluginclass, KallitheaAuthPluginBase):
 
        raise TypeError("Authentication class %s.KallitheaAuthPlugin is not "
 
                        "a subclass of %s" % (plugin, KallitheaAuthPluginBase))
 
    return pluginclass
 

	
 

	
 
def loadplugin(plugin):
 
    """
 
    Loads and returns an instantiated authentication plugin.
 

	
 
        see: importplugin
 
    """
kallithea/lib/auth_modules/auth_pam.py
 
@@ -87,54 +87,54 @@ class KallitheaAuthPlugin(auth_modules.K
 
    def auth(self, userobj, username, password, settings, **kwargs):
 
        if username not in _auth_cache:
 
            # Need lock here, as PAM authentication is not thread safe
 
            _pam_lock.acquire()
 
            try:
 
                auth_result = pam.authenticate(username, password,
 
                                               settings["service"])
 
                # cache result only if we properly authenticated
 
                if auth_result:
 
                    _auth_cache[username] = time.time()
 
            finally:
 
                _pam_lock.release()
 

	
 
            if not auth_result:
 
                log.error("PAM was unable to authenticate user: %s" % (username,))
 
                return None
 
        else:
 
            log.debug("Using cached auth for user: %s" % (username,))
 

	
 
        # old attrs fetched from Kallithea database
 
        admin = getattr(userobj, 'admin', False)
 
        active = getattr(userobj, 'active', True)
 
        email = getattr(userobj, 'email', '') or "%s@%s" % (username, socket.gethostname())
 
        firstname = getattr(userobj, 'firstname', '')
 
        lastname = getattr(userobj, 'lastname', '')
 
        extern_type = getattr(userobj, 'extern_type', '')
 

	
 
        user_attrs = {
 
            'username': username,
 
            'firstname': firstname,
 
            'lastname': lastname,
 
            'groups': [g.gr_name for g in grp.getgrall() if username in g.gr_mem],
 
            'email': email,
 
            'admin': admin,
 
            'active': active,
 
            "active_from_extern": None,
 
            'extern_name': username,
 
            'extern_type': extern_type,
 
        }
 

	
 
        try:
 
            user_data = pwd.getpwnam(username)
 
            regex = settings["gecos"]
 
            match = re.search(regex, user_data.pw_gecos)
 
            if match:
 
                user_attrs["firstname"] = match.group('first_name')
 
                user_attrs["lastname"] = match.group('last_name')
 
        except Exception:
 
            log.warning("Cannot extract additional info for PAM user")
 
            log.warning("Cannot extract additional info for PAM user %s", username)
 
            pass
 

	
 
        log.debug("pamuser: \n%s" % formatted_json(user_attrs))
 
        log.info('user %s authenticated correctly' % user_attrs['username'])
 
        return user_attrs
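
The GECOS handling above expects the configured regex to expose first_name and last_name named groups; whatever matches is copied into the user attributes. A standalone illustration with an example pattern (the actual regex is taken from the plugin settings and may differ):

    import re

    gecos = 'Ada Lovelace,Room 1,555-0100'
    match = re.search(r'(?P<first_name>\w+) (?P<last_name>\w+)', gecos)
    firstname = match.group('first_name') if match else ''
    lastname = match.group('last_name') if match else ''

    assert (firstname, lastname) == ('Ada', 'Lovelace')
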
kallithea/lib/celerylib/tasks.py
 
@@ -14,103 +14,100 @@
 
"""
 
kallithea.lib.celerylib.tasks
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Kallithea task modules, containing all tasks that are supposed to be run
 
by celery daemon
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Oct 6, 2010
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from celery.decorators import task
 

	
 
import os
 
import traceback
 
import logging
 
from os.path import join as jn
 

	
 
from time import mktime
 
from operator import itemgetter
 
from string import lower
 

	
 
from pylons import config
 

	
 
from kallithea import CELERY_ON
 
from kallithea.lib.celerylib import run_task, locked_task, dbsession, \
 
    str2bool, __get_lockkey, LockHeld, DaemonLock, get_session
 
from kallithea.lib.helpers import person
 
from kallithea.lib.rcmail.smtp_mailer import SmtpMailer
 
from kallithea.lib.utils import add_cache, action_logger
 
from kallithea.lib.compat import json, OrderedDict
 
from kallithea.lib.hooks import log_create_repository
 

	
 
from kallithea.model.db import Statistics, Repository, User
 

	
 

	
 
add_cache(config)  # pragma: no cover
 

	
 
__all__ = ['whoosh_index', 'get_commits_stats', 'send_email']
 

	
 

	
 
def get_logger(cls):
 
    if CELERY_ON:
 
        try:
 
-            log = cls.get_logger()
-        except Exception:
-            log = logging.getLogger(__name__)
-    else:
-        log = logging.getLogger(__name__)
-
-    return log
+            return cls.get_logger()
+        except AttributeError:
+            pass
+    return logging.getLogger(__name__)
 

	
 

	
 
@task(ignore_result=True)
 
@locked_task
 
@dbsession
 
def whoosh_index(repo_location, full_index):
 
    from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
 
    DBS = get_session()
 

	
 
    index_location = config['index_dir']
 
    WhooshIndexingDaemon(index_location=index_location,
 
                         repo_location=repo_location, sa=DBS)\
 
                         .run(full_index=full_index)
 

	
 

	
 
@task(ignore_result=True)
 
@dbsession
 
def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
 
    log = get_logger(get_commits_stats)
 
    DBS = get_session()
 
    lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
 
                            ts_max_y)
 
    lockkey_path = config['app_conf']['cache_dir']
 

	
 
    log.info('running task with lockkey %s' % lockkey)
 

	
 
    try:
 
        lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))
 

	
 
        # for js data compatibility cleans the key for person from '
 
        akc = lambda k: person(k).replace('"', "")
 

	
 
        co_day_auth_aggr = {}
 
        commits_by_day_aggregate = {}
 
        repo = Repository.get_by_repo_name(repo_name)
 
        if repo is None:
 
            return True
 

	
 
        repo = repo.scm_instance
 
        repo_size = repo.count()
 
        # return if repo have no revisions
 
        if repo_size < 1:
 
            lock.release()
 
            return True
 

	
 
        skip_date_limit = True
 
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
 
        last_rev = None
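
The get_logger rewrite above prefers the logger supplied by the celery task and falls back to a module-level logger when that attribute is missing (for example when celery is not in use). The same fallback shape in isolation:

    import logging

    def get_logger_for(obj):
        try:
            return obj.get_logger()
        except AttributeError:
            return logging.getLogger(__name__)

    assert isinstance(get_logger_for(object()), logging.Logger)
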
kallithea/lib/db_manage.py
 
@@ -145,136 +145,124 @@ class DbManage(object):
 
        try:
 
            curr_version = api.db_version(db_uri, repository_path)
 
            msg = ('Found current database under version '
 
                   'control with version %s' % curr_version)
 

	
 
        except (RuntimeError, DatabaseNotControlledError):
 
            curr_version = 1
 
            msg = ('Current database is not under version control. Setting '
 
                   'as version %s' % curr_version)
 
            api.version_control(db_uri, repository_path, curr_version)
 

	
 
        notify(msg)
 
        if curr_version == __dbversion__:
 
            print 'This database is already at the newest version'
 
            sys.exit(0)
 

	
 
        # clear cache keys
 
        log.info("Clearing cache keys now...")
 
        CacheInvalidation.clear_cache()
 

	
 
        upgrade_steps = range(curr_version + 1, __dbversion__ + 1)
 
        notify('attempting to do database upgrade from '
 
               'version %s to version %s' % (curr_version, __dbversion__))
 

	
 
        # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE
 
        _step = None
 
        for step in upgrade_steps:
 
            notify('performing upgrade step %s' % step)
 
            time.sleep(0.5)
 

	
 
            api.upgrade(db_uri, repository_path, step)
 
            notify('schema upgrade for step %s completed' % (step,))
 

	
 
            _step = step
 

	
 
        notify('upgrade to version %s successful' % _step)
 

	
 
    def fix_repo_paths(self):
 
        """
 
        Fixes an old kallithea version path into a new one without a '*'
 
        """
 

	
 
        paths = self.sa.query(Ui)\
 
                .filter(Ui.ui_key == '/')\
 
                .scalar()
 

	
 
        paths.ui_value = paths.ui_value.replace('*', '')
 

	
 
        try:
 
            self.sa.add(paths)
 
            self.sa.commit()
 
        except Exception:
 
            self.sa.rollback()
 
            raise
 

	
 
    def fix_default_user(self):
 
        """
 
        Fixes an old default user with some 'nicer' default values,
 
        used mostly for anonymous access
 
        """
 
        def_user = self.sa.query(User)\
 
                .filter(User.username == User.DEFAULT_USER)\
 
                .one()
 

	
 
        def_user.name = 'Anonymous'
 
        def_user.lastname = 'User'
 
        def_user.email = 'anonymous@kallithea-scm.org'
 

	
 
        try:
 
            self.sa.add(def_user)
 
            self.sa.commit()
 
        except Exception:
 
            self.sa.rollback()
 
            raise
 

	
 
    def fix_settings(self):
 
        """
 
        Fixes kallithea settings by adding the ga_code key for google analytics
 
        """
 

	
 
        hgsettings3 = Setting('ga_code', '')
 

	
 
        try:
 
            self.sa.add(hgsettings3)
 
            self.sa.commit()
 
        except Exception:
 
            self.sa.rollback()
 
            raise
 

	
 
    def admin_prompt(self, second=False):
 
        if not self.tests:
 
            import getpass
 

	
 
            # defaults
 
            defaults = self.cli_args
 
            username = defaults.get('username')
 
            password = defaults.get('password')
 
            email = defaults.get('email')
 

	
 
            def get_password():
 
                password = getpass.getpass('Specify admin password '
 
                                           '(min 6 chars):')
 
                confirm = getpass.getpass('Confirm password:')
 

	
 
                if password != confirm:
 
                    log.error('passwords mismatch')
 
                    return False
 
                if len(password) < 6:
 
                    log.error('password is too short, use at least 6 characters')
 
                    return False
 

	
 
                return password
 
            if username is None:
 
                username = raw_input('Specify admin username:')
 
            if password is None:
 
                password = get_password()
 
                if not password:
 
                    #second try
 
                    password = get_password()
 
                    if not password:
 
                        sys.exit()
 
            if email is None:
 
                email = raw_input('Specify admin email:')
 
            self.create_user(username, password, email, True)
 
        else:
 
            log.info('creating admin and regular test users')
 
            from kallithea.tests import TEST_USER_ADMIN_LOGIN, \
 
            TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \
 
            TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \
 
            TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \
 
            TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL
 

	
 
            self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,
 
                             TEST_USER_ADMIN_EMAIL, True)
 

	
 
            self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
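
The upgrade loop earlier in this file derives its steps as range(curr_version + 1, __dbversion__ + 1): every schema version strictly after the current one, up to and including the target. In isolation:

    def upgrade_steps(current, target):
        return list(range(current + 1, target + 1))

    assert upgrade_steps(3, 6) == [4, 5, 6]
    assert upgrade_steps(6, 6) == []      # already at the newest version
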
kallithea/lib/helpers.py
 
@@ -1273,158 +1273,152 @@ def urlify_changesets(text_, repository)
 
    :param repository: repo name to build the URL with
 
    """
 
    from pylons import url  # doh, we need to re-import url to mock it later
 

	
 
    def url_func(match_obj):
 
        rev = match_obj.group(0)
 
        return '<a class="revision-link" href="%(url)s">%(rev)s</a>' % {
 
         'url': url('changeset_home', repo_name=repository, revision=rev),
 
         'rev': rev,
 
        }
 

	
 
    return re.sub(r'(?:^|(?<=[\s(),]))([0-9a-fA-F]{12,40})(?=$|\s|[.,:()])', url_func, text_)
 

	
 
def linkify_others(t, l):
 
    urls = re.compile(r'(\<a.*?\<\/a\>)',)
 
    links = []
 
    for e in urls.split(t):
 
        if not urls.match(e):
 
            links.append('<a class="message-link" href="%s">%s</a>' % (l, e))
 
        else:
 
            links.append(e)
 

	
 
    return ''.join(links)
 

	
 
def urlify_commit(text_, repository, link_=None):
 
    """
 
    Parses the given text message and makes proper links.
    Issues are linked to the given issue server; the rest becomes a changeset
    link if link_ is given, otherwise it is left as plain text.
 

	
 
    :param text_:
 
    :param repository:
 
    :param link_: changeset link
 
    """
 
    def escaper(string):
 
        return string.replace('<', '&lt;').replace('>', '&gt;')
 

	
 
    # urlify changesets - extract revisions and make link out of them
 
    newtext = urlify_changesets(escaper(text_), repository)
 

	
 
    # extract http/https links and make them real urls
 
    newtext = urlify_text(newtext, safe=False)
 

	
 
    newtext = urlify_issues(newtext, repository, link_)
 

	
 
    return literal(newtext)
 

	
 
def urlify_issues(newtext, repository, link_=None):
 
    try:
 
-        import traceback
-        from kallithea import CONFIG
-        conf = CONFIG
+    from kallithea import CONFIG as conf
 

	
 
        # allow multiple issue servers to be used
 
        valid_indices = [
 
            x.group(1)
 
            for x in map(lambda x: re.match(r'issue_pat(.*)', x), conf.keys())
 
            if x and 'issue_server_link%s' % x.group(1) in conf
 
            and 'issue_prefix%s' % x.group(1) in conf
 
        ]
 

	
 
        if valid_indices:
 
            log.debug('found issue server suffixes `%s` during valuation of: %s'
 
                      % (','.join(valid_indices), newtext))
 

	
 
        for pattern_index in valid_indices:
 
            ISSUE_PATTERN = conf.get('issue_pat%s' % pattern_index)
 
            ISSUE_SERVER_LNK = conf.get('issue_server_link%s' % pattern_index)
 
            ISSUE_PREFIX = conf.get('issue_prefix%s' % pattern_index)
 

	
 
            log.debug('pattern suffix `%s` PAT:%s SERVER_LINK:%s PREFIX:%s'
 
                      % (pattern_index, ISSUE_PATTERN, ISSUE_SERVER_LNK,
 
                         ISSUE_PREFIX))
 

	
 
            URL_PAT = re.compile(r'%s' % ISSUE_PATTERN)
 

	
 
            def url_func(match_obj):
 
                pref = ''
 
                if match_obj.group().startswith(' '):
 
                    pref = ' '
 

	
 
                issue_id = ''.join(match_obj.groups())
 
                issue_url = ISSUE_SERVER_LNK.replace('{id}', issue_id)
 
                if repository:
 
                    issue_url = issue_url.replace('{repo}', repository)
 
                    repo_name = repository.split(URL_SEP)[-1]
 
                    issue_url = issue_url.replace('{repo_name}', repo_name)
 

	
 
                return (
 
                    '%(pref)s<a class="%(cls)s" href="%(url)s">'
 
                    '%(issue-prefix)s%(id-repr)s'
 
                    '</a>'
 
                    ) % {
 
                     'pref': pref,
 
                     'cls': 'issue-tracker-link',
 
                     'url': issue_url,
 
                     'id-repr': issue_id,
 
                     'issue-prefix': ISSUE_PREFIX,
 
                     'serv': ISSUE_SERVER_LNK,
 
                    }
 
            newtext = URL_PAT.sub(url_func, newtext)
 
            log.debug('processed prefix:`%s` => %s' % (pattern_index, newtext))
 

	
 
        # if we actually did something above
 
        if link_:
 
            # wrap not links into final link => link_
 
            newtext = linkify_others(newtext, link_)
 
    except Exception:
 
        log.error(traceback.format_exc())
 
        pass
 
    return newtext
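The suffix discovery above only needs a flat key/value mapping; a minimal sketch with a plain dict standing in for kallithea.CONFIG (the tracker URLs and patterns are illustrative):

import re

conf = {
    'issue_pat': r'#(\d+)',
    'issue_server_link': 'https://issues.example.com/{repo}/issue/{id}',
    'issue_prefix': '#',
    'issue_pat_jira': r'(JIRA-\d+)',
    'issue_server_link_jira': 'https://jira.example.com/browse/{id}',
    'issue_prefix_jira': '',
}

valid_indices = [
    x.group(1)
    for x in map(lambda k: re.match(r'issue_pat(.*)', k), conf.keys())
    if x and 'issue_server_link%s' % x.group(1) in conf
    and 'issue_prefix%s' % x.group(1) in conf
]
print sorted(valid_indices)  # ['', '_jira'] - the default entry plus one suffixed tracker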
 

	
 

	
 
def rst(source):
 
    return literal('<div class="rst-block">%s</div>' %
 
                   MarkupRenderer.rst(source))
 

	
 

	
 
def rst_w_mentions(source):
 
    """
 
    Wrapped rst renderer with @mention highlighting
 

	
 
    :param source:
 
    """
 
    return literal('<div class="rst-block">%s</div>' %
 
                   MarkupRenderer.rst_with_mentions(source))
 

	
 
def short_ref(ref_type, ref_name):
 
    if ref_type == 'rev':
 
        return short_id(ref_name)
 
    return ref_name
 

	
 
def link_to_ref(repo_name, ref_type, ref_name, rev=None):
 
    """
 
    Return full markup for a href to changeset_home for a changeset.
 
    If ref_type is branch it will link to changelog.
 
    ref_name is shortened if ref_type is 'rev'.
 
    If rev is specified, show it too, explicitly linking to that revision.
 
    """
 
    txt = short_ref(ref_type, ref_name)
 
    if ref_type == 'branch':
 
        u = url('changelog_home', repo_name=repo_name, branch=ref_name)
 
    else:
 
        u = url('changeset_home', repo_name=repo_name, revision=ref_name)
 
    l = link_to(repo_name + '#' + txt, u)
 
    if rev and ref_type != 'rev':
 
        l = literal('%s (%s)' % (l, link_to(short_id(rev), url('changeset_home', repo_name=repo_name, revision=rev))))
 
    return l
 

	
 
def changeset_status(repo, revision):
 
    return ChangesetStatusModel().get_status(repo, revision)
 

	
 

	
 
def changeset_status_lbl(changeset_status):
 
    return dict(ChangesetStatus.STATUSES).get(changeset_status)
 

	
 

	
 
def get_permission_name(key):
kallithea/lib/utils.py
Show inline comments
 
@@ -75,193 +75,182 @@ def recursive_replace(str_, replace=' ')
 
    Examples::
 
    >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-')
 
    'Mighty-Mighty-Bo-sstones'
 
    """
 

	
 
    if str_.find(replace * 2) == -1:
 
        return str_
 
    else:
 
        str_ = str_.replace(replace * 2, replace)
 
        return recursive_replace(str_, replace)
 

	
 

	
 
def repo_name_slug(value):
 
    """
 
    Return a slug of the repository name.
 
    This function is called on each repository creation/modification
 
    to prevent bad characters from ending up in the repo name
 
    """
 

	
 
    slug = remove_formatting(value)
 
    slug = strip_tags(slug)
 

	
 
    for c in """`?=[]\;'"<>,/~!@#$%^&*()+{}|: """:
 
        slug = slug.replace(c, '-')
 
    slug = recursive_replace(slug, '-')
 
    slug = collapse(slug, '-')
 
    return slug
 

	
 

	
 
#==============================================================================
 
# PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
 
#==============================================================================
 
def get_repo_slug(request):
 
    _repo = request.environ['pylons.routes_dict'].get('repo_name')
 
    if _repo:
 
        _repo = _repo.rstrip('/')
 
    return _repo
 

	
 

	
 
def get_repo_group_slug(request):
 
    _group = request.environ['pylons.routes_dict'].get('group_name')
 
    if _group:
 
        _group = _group.rstrip('/')
 
    return _group
 

	
 

	
 
def get_user_group_slug(request):
 
    _group = request.environ['pylons.routes_dict'].get('id')
 
    try:
 
        _group = UserGroup.get(_group)
 
        if _group:
 
            _group = _group.users_group_name
 
    except Exception:
 
        log.debug(traceback.format_exc())
 
        #catch all failures here
 
        pass
 

	
 
    return _group
 
        return _group.users_group_name
 
    return None
 

	
 

	
 
def _extract_id_from_repo_name(repo_name):
 
    if repo_name.startswith('/'):
 
        repo_name = repo_name.lstrip('/')
 
    by_id_match = re.match(r'^_(\d{1,})', repo_name)
 
    if by_id_match:
 
        return by_id_match.groups()[0]
 

	
 

	
 
def get_repo_by_id(repo_name):
 
    """
 
    Extracts repo_name by id from special urls. Example url is _11/repo_name
 

	
 
    :param repo_name:
 
    :return: repo_name if matched else None
 
    """
 
    try:
 
        _repo_id = _extract_id_from_repo_name(repo_name)
 
        if _repo_id:
 
            from kallithea.model.db import Repository
 
            return Repository.get(_repo_id).repo_name
 
    except Exception:
 
        log.debug('Failed to extract repo_name from URL %s' % (
 
                  traceback.format_exc()))
 
        return
 
        repo = Repository.get(_repo_id)
 
        if repo:
 
            # TODO: return repo instead of reponame? or would that be a layering violation?
 
            return repo.repo_name
 
    return None
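A small sketch of the id-style URL form handled above (repository names are illustrative):

import re

def demo_extract_id(repo_name):
    by_id_match = re.match(r'^_(\d{1,})', repo_name.lstrip('/'))
    if by_id_match:
        return by_id_match.groups()[0]

print demo_extract_id('_11/some/repo')  # '11' - would then be resolved via Repository.get()
print demo_extract_id('group/repo')     # None - a normal repo name, no id prefix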
 

	
 

	
 
def action_logger(user, action, repo, ipaddr='', sa=None, commit=False):
 
    """
 
    Action logger for various actions made by users
 

	
 
    :param user: user that made this action, can be a unique username string or
 
        object containing user_id attribute
 
    :param action: action to log, should be one of the predefined unique actions for
 
        easy translations
 
    :param repo: string name of repository or object containing repo_id,
 
        that action was made on
 
    :param ipaddr: optional IP address from which the action was made
 
    :param sa: optional sqlalchemy session
 

	
 
    """
 

	
 
    if not sa:
 
        sa = meta.Session()
 
    # if we don't get explicit IP address try to get one from registered user
 
    # in tmpl context var
 
    if not ipaddr:
 
        ipaddr = getattr(get_current_authuser(), 'ip_addr', '')
 

	
 
    try:
 
        if getattr(user, 'user_id', None):
 
            user_obj = User.get(user.user_id)
 
        elif isinstance(user, basestring):
 
            user_obj = User.get_by_username(user)
 
        else:
 
            raise Exception('You have to provide a user object or a username')
 

	
 
        if getattr(repo, 'repo_id', None):
 
            repo_obj = Repository.get(repo.repo_id)
 
            repo_name = repo_obj.repo_name
 
        elif isinstance(repo, basestring):
 
            repo_name = repo.lstrip('/')
 
            repo_obj = Repository.get_by_repo_name(repo_name)
 
        else:
 
            repo_obj = None
 
            repo_name = ''
 

	
 
        user_log = UserLog()
 
        user_log.user_id = user_obj.user_id
 
        user_log.username = user_obj.username
 
        user_log.action = safe_unicode(action)
 

	
 
        user_log.repository = repo_obj
 
        user_log.repository_name = repo_name
 

	
 
        user_log.action_date = datetime.datetime.now()
 
        user_log.user_ip = ipaddr
 
        sa.add(user_log)
 

	
 
        log.info('Logging action:%s on %s by user:%s ip:%s' %
 
                 (action, safe_unicode(repo), user_obj, ipaddr))
 
        if commit:
 
            sa.commit()
 
    except Exception:
 
        log.error(traceback.format_exc())
 
        raise
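A hedged usage sketch; the action key and repository name below are illustrative, not taken from the predefined action list:

action_logger('admin',              # username string or object with a user_id attribute
              'user_updated_repo',  # illustrative action key
              'group/some-repo',    # repo name, or an object with repo_id
              ipaddr='10.0.0.5',
              commit=True)          # write the UserLog row immediately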
 

	
 

	
 
def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
 
    """
 
    Scans the given path for repos and returns (name, (type, path)) tuples
 

	
 
    :param path: path to scan for repositories
 
    :param recursive: recursive search and return names with subdirs in front
 
    """
 

	
 
    # remove ending slash for better results
 
    path = path.rstrip(os.sep)
 
    log.debug('now scanning in %s location recursive:%s...' % (path, recursive))
 

	
 
    def _get_repos(p):
 
        if not os.access(p, os.R_OK) or not os.access(p, os.X_OK):
 
            log.warning('ignoring repo path without access: %s' % (p,))
 
            return
 
        if not os.access(p, os.W_OK):
 
            log.warning('repo path without write access: %s' % (p,))
 
        for dirpath in os.listdir(p):
 
            if os.path.isfile(os.path.join(p, dirpath)):
 
                continue
 
            cur_path = os.path.join(p, dirpath)
 

	
 
            # skip removed repos
 
            if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
 
                continue
 

	
 
            # skip .<something> dirs
 
            if dirpath.startswith('.'):
 
                continue
 

	
 
            try:
 
                scm_info = get_scm(cur_path)
 
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
 
            except VCSError:
 
                if not recursive:
 
                    continue
 
                # check if this dir contains other repos for recursive scan
 
                rec_path = os.path.join(p, dirpath)
 
                if os.path.isdir(rec_path):
 
                    for inner_scm in _get_repos(rec_path):
 
                        yield inner_scm
 

	
 
    return _get_repos(path)
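Consuming the generator returned above; the scan path is illustrative:

for name, (scm_type, repo_path) in get_filesystem_repos('/srv/repos', recursive=True):
    print '%-4s %s' % (scm_type, name)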
 

	
 

	
 
@@ -771,97 +760,97 @@ class BasePasterCommand(Command):
 

	
 
    def bootstrap_config(self, conf):
 
        """
 
        Loads the pylons configuration.
 
        """
 
        from pylons import config as pylonsconfig
 

	
 
        self.path_to_ini_file = os.path.realpath(conf)
 
        conf = paste.deploy.appconfig('config:' + self.path_to_ini_file)
 
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)
 

	
 
    def _init_session(self):
 
        """
 
        Inits SqlAlchemy Session
 
        """
 
        logging.config.fileConfig(self.path_to_ini_file)
 
        from pylons import config
 
        from kallithea.model import init_model
 
        from kallithea.lib.utils2 import engine_from_config
 

	
 
        #get to remove repos !!
 
        add_cache(config)
 
        engine = engine_from_config(config, 'sqlalchemy.db1.')
 
        init_model(engine)
 

	
 

	
 
def check_git_version():
 
    """
 
    Checks which version of git is installed on the system, and issues a warning
 
    if it is too old for Kallithea to work properly.
 
    """
 
    from kallithea import BACKENDS
 
    from kallithea.lib.vcs.backends.git.repository import GitRepository
 
    from kallithea.lib.vcs.conf import settings
 
    from distutils.version import StrictVersion
 

	
 
    if 'git' not in BACKENDS:
 
        return None
 

	
 
    stdout, stderr = GitRepository._run_git_command('--version', _bare=True,
 
                                                    _safe=True)
 

	
 
    ver = (stdout.split(' ')[-1] or '').strip() or '0.0.0'
 
    if len(ver.split('.')) > 3:
 
        # StrictVersion accepts at most 3 version components
 
        ver = '.'.join(ver.split('.')[:3])
 
    try:
 
        _ver = StrictVersion(ver)
 
    except Exception:
 
    except ValueError:
 
        _ver = StrictVersion('0.0.0')
 
        stderr = traceback.format_exc()
 

	
 
    req_ver = '1.7.4'
 
    to_old_git = False
 
    if _ver < StrictVersion(req_ver):
 
        to_old_git = True
 

	
 
    if 'git' in BACKENDS:
 
        log.debug('Git executable: "%s" version detected: %s'
 
                  % (settings.GIT_EXECUTABLE_PATH, stdout))
 
        if stderr:
 
            log.warning('Unable to detect git version, org error was: %r' % stderr)
 
        elif to_old_git:
 
            log.warning('Kallithea detected git version %s, which is too old '
 
                        'for the system to function properly. Make sure '
 
                        'its version is at least %s' % (ver, req_ver))
 
    return _ver
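The three-component truncation and comparison above, in isolation:

from distutils.version import StrictVersion

ver = '1.9.4.msysgit.2'  # illustrative `git --version` output with extra components
if len(ver.split('.')) > 3:
    ver = '.'.join(ver.split('.')[:3])              # -> '1.9.4'
print StrictVersion(ver) >= StrictVersion('1.7.4')  # True - recent enough for Kallithea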
 

	
 

	
 
@decorator.decorator
 
def jsonify(func, *args, **kwargs):
 
    """Action decorator that formats output for JSON
 

	
 
    Given a function that will return content, this decorator will turn
 
    the result into JSON, with a content-type of 'application/json' and
 
    output it.
 

	
 
    """
 
    from pylons.decorators.util import get_pylons
 
    from kallithea.lib.compat import json
 
    pylons = get_pylons(args)
 
    pylons.response.headers['Content-Type'] = 'application/json; charset=utf-8'
 
    data = func(*args, **kwargs)
 
    if isinstance(data, (list, tuple)):
 
        msg = "JSON responses with Array envelopes are susceptible to " \
 
              "cross-site data leak attacks, see " \
 
              "http://wiki.pylonshq.com/display/pylonsfaq/Warnings"
 
        warnings.warn(msg, Warning, 2)
 
        log.warning(msg)
 
    log.debug("Returning JSON wrapped action output")
 
    return json.dumps(data, encoding='utf-8')
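A hedged controller sketch; the controller class and returned payload are illustrative:

class RepoStatsController(BaseController):  # hypothetical Pylons controller

    @jsonify
    def commit_counts(self):
        # the decorator sets the JSON content type and serializes this dict
        return {'week': 42, 'month': 180}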
 

	
 

	
 
def conditional_cache(region, prefix, condition, func):
 
    """
 

	
 
    Conditional caching function use like::
kallithea/lib/utils2.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.lib.utils
 
~~~~~~~~~~~~~~~~~~~
 

	
 
Some simple helper functions
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jan 5, 2011
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 

	
 
import os
 
import re
 
import sys
 
import time
 
import uuid
 
import datetime
 
import traceback
 
import webob
 
import urllib
 
import urlobject
 

	
 
from pylons.i18n.translation import _, ungettext
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 
from kallithea.lib.compat import json
 

	
 

	
 
def __get_lem():
 
    """
 
    Get language extension map based on what's inside pygments lexers
 
    """
 
    from pygments import lexers
 
    from string import lower
 
    from collections import defaultdict
 

	
 
    d = defaultdict(lambda: [])
 

	
 
    def __clean(s):
 
        s = s.lstrip('*')
 
        s = s.lstrip('.')
 

	
 
        if s.find('[') != -1:
 
            exts = []
 
            start, stop = s.find('['), s.find(']')
 

	
 
            for suffix in s[start + 1:stop]:
 
                exts.append(s[:s.find('[')] + suffix)
 
            return map(lower, exts)
 
        else:
 
            return map(lower, [s])
 

	
 
    for lx, t in sorted(lexers.LEXERS.items()):
 
        m = map(__clean, t[-2])
 
        if m:
 
            m = reduce(lambda x, y: x + y, m)
 
            for ext in m:
 
                desc = lx.replace('Lexer', '')
 
                d[ext].append(desc)
 

	
 
    return dict(d)
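How the bracket expansion in __clean unfolds a pygments filename pattern such as '*.php[345]':

s = 'php[345]'                       # after the leading '*.' has been stripped
exts = []
start, stop = s.find('['), s.find(']')
for suffix in s[start + 1:stop]:
    exts.append(s[:s.find('[')] + suffix)
print exts                           # ['php3', 'php4', 'php5']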
 

	
 

	
 
def str2bool(_str):
 
    """
 
    returns a True/False value from the given string; it tries to translate the
 
    string into a boolean
 
@@ -551,125 +550,123 @@ def time_to_datetime(tm):
 
            except ValueError:
 
                return
 
        return datetime.datetime.fromtimestamp(tm)
 

	
 
# Must match regexp in kallithea/public/js/base.js MentionsAutoComplete()
 
# Eat char before @ - it must not look like we are in an email address.
 
# Matching is greedy so we don't have to look beyond the end.
 
MENTIONS_REGEX = re.compile(r'(?:^|[^a-zA-Z0-9])@([a-zA-Z0-9][-_.a-zA-Z0-9]*[a-zA-Z0-9])')
 

	
 
def extract_mentioned_users(s):
 
    r"""
 
    Returns unique usernames from given string s that have @mention
 

	
 
    :param s: string to get mentions
 

	
 
    >>> extract_mentioned_users('@1-2.a_X,@1234 not@not @ddd@not @n @ee @ff @gg, @gg;@hh @n\n@zz,')
 
    ['1-2.a_X', '1234', 'ddd', 'ee', 'ff', 'gg', 'hh', 'zz']
 
    """
 
    usrs = set()
 
    for username in MENTIONS_REGEX.findall(s):
 
        usrs.add(username)
 

	
 
    return sorted(list(usrs), key=lambda k: k.lower())
 

	
 

	
 
class AttributeDict(dict):
 
    def __getattr__(self, attr):
 
        return self.get(attr, None)
 
    __setattr__ = dict.__setitem__
 
    __delattr__ = dict.__delitem__
 

	
 

	
 
def fix_PATH(os_=None):
 
    """
 
    Get the directory of the active python executable and prepend it to the PATH variable to fix issues
 
    with subprocess calls and different python versions
 
    """
 
    if os_ is None:
 
        import os
 
    else:
 
        os = os_
 

	
 
    cur_path = os.path.split(sys.executable)[0]
 
    if not os.environ['PATH'].startswith(cur_path):
 
        os.environ['PATH'] = '%s:%s' % (cur_path, os.environ['PATH'])
 

	
 

	
 
def obfuscate_url_pw(engine):
 
    _url = engine or ''
 
    from sqlalchemy.engine import url as sa_url
 
    from sqlalchemy.exc import ArgumentError
 
    try:
 
        _url = sa_url.make_url(engine)
 
        _url = sa_url.make_url(engine or '')
 
    except ArgumentError:
 
        return engine
 
        if _url.password:
 
            _url.password = 'XXXXX'
 
    except Exception:
 
        pass
 
    return str(_url)
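Usage sketch with illustrative engine URLs:

print obfuscate_url_pw('postgresql://kallithea:s3cret@localhost/kallithea')
# -> postgresql://kallithea:XXXXX@localhost/kallithea  (password masked)
print obfuscate_url_pw('sqlite:///kallithea.db')
# -> sqlite:///kallithea.db  (nothing to mask)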
 

	
 

	
 
def get_server_url(environ):
 
    req = webob.Request(environ)
 
    return req.host_url + req.script_name
 

	
 

	
 
def _extract_extras(env=None):
 
    """
 
    Extracts the rc extras data from os.environ and wraps it in an
 
    AttributeDict object
 
    """
 
    if not env:
 
        env = os.environ
 

	
 
    try:
 
        rc_extras = json.loads(env['KALLITHEA_EXTRAS'])
 
    except Exception:
 
        print os.environ
 
        print >> sys.stderr, traceback.format_exc()
 
    except KeyError:
 
        rc_extras = {}
 

	
 
    try:
 
        for k in ['username', 'repository', 'locked_by', 'scm', 'make_lock',
 
                  'action', 'ip']:
 
            rc_extras[k]
 
    except KeyError, e:
 
        raise Exception('Missing key %s in os.environ %s' % (e, rc_extras))
 

	
 
    return AttributeDict(rc_extras)
 

	
 

	
 
def _set_extras(extras):
 
    # RC_SCM_DATA can probably be removed in the future, but for compatibility now...
 
    os.environ['KALLITHEA_EXTRAS'] = os.environ['RC_SCM_DATA'] = json.dumps(extras)
 

	
 

	
 
def unique_id(hexlen=32):
 
    alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
 
    return suuid(truncate_to=hexlen, alphabet=alphabet)
 

	
 

	
 
def suuid(url=None, truncate_to=22, alphabet=None):
 
    """
 
    Generate and return a short URL safe UUID.
 

	
 
    If the url parameter is provided, set the namespace to the provided
 
    URL and generate a UUID.
 

	
 
    :param url: URL to get the uuid for
 
    :param truncate_to: truncate the basic 22-character UUID to a shorter version
 

	
 
    The IDs won't be universally unique any longer, but the probability of
 
    a collision will still be very low.
 
    """
 
    # Define our alphabet.
 
    _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"
 

	
 
    # If no URL is given, generate a random UUID.
 
    if url is None:
 
        unique_id = uuid.uuid4().int
 
    else:
 
        unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int
 

	
 
    alphabet_length = len(_ALPHABET)
 
    output = []
 
    while unique_id > 0:
 
        digit = unique_id % alphabet_length
kallithea/lib/vcs/backends/git/repository.py
Show inline comments
 
@@ -249,97 +249,97 @@ class GitRepository(BaseRepository):
 
            else:
 
                return self._repo
 
        except (NotGitRepository, OSError), err:
 
            raise RepositoryError(err)
 

	
 
    def _get_all_revisions(self):
 
        # we must check if this repo is not empty, since later command
 
        # fails if it is. And it's cheaper to ask than to catch the subprocess
 
        # errors
 
        try:
 
            self._repo.head()
 
        except KeyError:
 
            return []
 

	
 
        rev_filter = settings.GIT_REV_FILTER
 
        cmd = 'rev-list %s --reverse --date-order' % (rev_filter)
 
        try:
 
            so, se = self.run_git_command(cmd)
 
        except RepositoryError:
 
            # Can be raised for empty repositories
 
            return []
 
        return so.splitlines()
 

	
 
    def _get_all_revisions2(self):
 
        #alternate implementation using dulwich
 
        includes = [x[1][0] for x in self._parsed_refs.iteritems()
 
                    if x[1][1] != 'T']
 
        return [c.commit.id for c in self._repo.get_walker(include=includes)]
 

	
 
    def _get_revision(self, revision):
 
        """
 
        For git backend we always return integer here. This way we ensure
 
        that the changeset's revision attribute would become an integer.
 
        """
 

	
 
        is_null = lambda o: len(o) == revision.count('0')
 

	
 
        if self._empty:
 
            raise EmptyRepositoryError("There are no changesets yet")
 

	
 
        if revision in (None, '', 'tip', 'HEAD', 'head', -1):
 
            return self.revisions[-1]
 

	
 
        is_bstr = isinstance(revision, (str, unicode))
 
        if ((is_bstr and revision.isdigit() and len(revision) < 12)
 
            or isinstance(revision, int) or is_null(revision)):
 
            try:
 
                revision = self.revisions[int(revision)]
 
            except Exception:
 
            except IndexError:
 
                msg = ("Revision %s does not exist for %s" % (revision, self))
 
                raise ChangesetDoesNotExistError(msg)
 

	
 
        elif is_bstr:
 
            # get by branch/tag name
 
            _ref_revision = self._parsed_refs.get(revision)
 
            if _ref_revision:  # and _ref_revision[1] in ['H', 'RH', 'T']:
 
                return _ref_revision[0]
 

	
 
            _tags_shas = self.tags.values()
 
            # maybe it's a tag ? we don't have them in self.revisions
 
            if revision in _tags_shas:
 
                return _tags_shas[_tags_shas.index(revision)]
 

	
 
            elif not SHA_PATTERN.match(revision) or revision not in self.revisions:
 
                msg = ("Revision %s does not exist for %s" % (revision, self))
 
                raise ChangesetDoesNotExistError(msg)
 

	
 
        # Ensure we return full id
 
        if not SHA_PATTERN.match(str(revision)):
 
            raise ChangesetDoesNotExistError("Given revision %s not recognized"
 
                % revision)
 
        return revision
 

	
 
    def get_ref_revision(self, ref_type, ref_name):
 
        """
 
        Returns the revision (full sha) that the given ``ref_name``
 
        resolves to in this repository.
 
        """
 
        return self._get_revision(ref_name)
 

	
 
    def _get_archives(self, archive_name='tip'):
 

	
 
        for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
 
                yield {"type": i[0], "extension": i[1], "node": archive_name}
 

	
 
    def _get_url(self, url):
 
        """
 
        Returns normalized url. If no scheme is given, falls back to the
 
        filesystem (``file:///``) scheme.
 
        """
 
        url = str(url)
 
        if url != 'default' and not '://' in url:
 
            url = ':///'.join(('file', url))
 
        return url
 

	
 
    def get_hook_location(self):
 
        """
kallithea/lib/vcs/backends/hg/repository.py
Show inline comments
 
@@ -299,103 +299,98 @@ class MercurialRepository(BaseRepository
 
            handlers.extend((httpbasicauthhandler(passmgr),
 
                             httpdigestauthhandler(passmgr)))
 

	
 
        o = urllib2.build_opener(*handlers)
 
        o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
 
                        ('Accept', 'application/mercurial-0.1')]
 

	
 
        q = {"cmd": 'between'}
 
        q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
 
        qs = '?%s' % urllib.urlencode(q)
 
        cu = "%s%s" % (test_uri, qs)
 
        req = urllib2.Request(cu, None, {})
 

	
 
        try:
 
            resp = o.open(req)
 
            if resp.code != 200:
 
                raise Exception('Return Code is not 200')
 
        except Exception, e:
 
            # means it cannot be cloned
 
            raise urllib2.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
 

	
 
        # now check if it's a proper hg repo
 
        try:
 
            httppeer(repoui or ui.ui(), url).lookup('tip')
 
        except Exception, e:
 
            raise urllib2.URLError(
 
                "url [%s] does not look like an hg repo org_exc: %s"
 
                % (cleaned_uri, e))
 

	
 
        return True
 

	
 
    def _get_repo(self, create, src_url=None, update_after_clone=False):
 
        """
 
        Function will check for mercurial repository in given path and return
 
        a localrepo object. If there is no repository in that path it will
 
        raise an exception unless ``create`` parameter is set to True - in
 
        that case repository would be created and returned.
 
        If ``src_url`` is given, would try to clone repository from the
 
        location at the given clone point. Additionally it will update the
 
        working copy according to the ``update_after_clone`` flag
 
        """
 

	
 
        try:
 
            if src_url:
 
                url = str(self._get_url(src_url))
 
                opts = {}
 
                if not update_after_clone:
 
                    opts.update({'noupdate': True})
 
                try:
 
                    MercurialRepository._check_url(url, self.baseui)
 
                    clone(self.baseui, url, self.path, **opts)
 
#                except urllib2.URLError:
 
#                    raise Abort("Got HTTP 404 error")
 
                except Exception:
 
                    raise
 

	
 
                # Don't try to create if we've already cloned repo
 
                create = False
 
            return localrepository(self.baseui, self.path, create=create)
 
        except (Abort, RepoError), err:
 
            if create:
 
                msg = "Cannot create repository at %s. Original error was %s"\
 
                    % (self.path, err)
 
            else:
 
                msg = "Not valid repository at %s. Original error was %s"\
 
                    % (self.path, err)
 
            raise RepositoryError(msg)
 

	
 
    @LazyProperty
 
    def in_memory_changeset(self):
 
        return MercurialInMemoryChangeset(self)
 

	
 
    @LazyProperty
 
    def description(self):
 
        undefined_description = u'unknown'
 
        _desc = self._repo.ui.config('web', 'description', None, untrusted=True)
 
        return safe_unicode(_desc or undefined_description)
 

	
 
    @LazyProperty
 
    def contact(self):
 
        undefined_contact = u'Unknown'
 
        return safe_unicode(get_contact(self._repo.ui.config)
 
                            or undefined_contact)
 

	
 
    @LazyProperty
 
    def last_change(self):
 
        """
 
        Returns last change made on this repository as datetime object
 
        """
 
        return date_fromtimestamp(self._get_mtime(), makedate()[1])
 

	
 
    def _get_mtime(self):
 
        try:
 
            return time.mktime(self.get_changeset().date.timetuple())
 
        except RepositoryError:
 
            #fallback to filesystem
 
            cl_path = os.path.join(self.path, '.hg', "00changelog.i")
 
            st_path = os.path.join(self.path, '.hg', "store")
 
            if os.path.exists(cl_path):
 
                return os.stat(cl_path).st_mtime
 
            else:
 
                return os.stat(st_path).st_mtime
 

	
kallithea/lib/vcs/nodes.py
Show inline comments
 
@@ -295,99 +295,99 @@ class FileNode(Node):
 
    @LazyProperty
 
    def size(self):
 
        if self.changeset:
 
            return self.changeset.get_file_size(self.path)
 
        raise NodeError("Cannot retrieve size of the file without related "
 
            "changeset attribute")
 

	
 
    @LazyProperty
 
    def message(self):
 
        if self.changeset:
 
            return self.last_changeset.message
 
        raise NodeError("Cannot retrieve message of the file without related "
 
            "changeset attribute")
 

	
 
    @LazyProperty
 
    def last_changeset(self):
 
        if self.changeset:
 
            return self.changeset.get_file_changeset(self.path)
 
        raise NodeError("Cannot retrieve last changeset of the file without "
 
            "related changeset attribute")
 

	
 
    def get_mimetype(self):
 
        """
 
        Mimetype is calculated based on the file's content. If ``_mimetype``
 
        attribute is available, it will be returned (backends which store
 
        mimetypes or can easily recognize them, should set this private
 
        attribute to indicate that type should *NOT* be calculated).
 
        """
 
        if hasattr(self, '_mimetype'):
 
            if (isinstance(self._mimetype, (tuple, list,)) and
 
                len(self._mimetype) == 2):
 
                return self._mimetype
 
            else:
 
                raise NodeError('given _mimetype attribute must be a 2 '
 
                                'element list or tuple')
 

	
 
        mtype, encoding = mimetypes.guess_type(self.name)
 

	
 
        if mtype is None:
 
            if self.is_binary:
 
                mtype = 'application/octet-stream'
 
                encoding = None
 
            else:
 
                mtype = 'text/plain'
 
                encoding = None
 

	
 
                #try with pygments
 
                try:
 
                    from pygments.lexers import get_lexer_for_filename
 
                    mt = get_lexer_for_filename(self.name).mimetypes
 
                except Exception:
 
                    from pygments import lexers
 
                    mt = lexers.get_lexer_for_filename(self.name).mimetypes
 
                except lexers.ClassNotFound:
 
                    mt = None
 

	
 
                if mt:
 
                    mtype = mt[0]
 

	
 
        return mtype, encoding
 

	
 
    @LazyProperty
 
    def mimetype(self):
 
        """
 
        Wrapper around full mimetype info. It returns only the type of the fetched
 
        mimetype without the encoding part. Use the get_mimetype function to fetch
 
        the full set of (type, encoding)
 
        """
 
        return self.get_mimetype()[0]
 

	
 
    @LazyProperty
 
    def mimetype_main(self):
 
        return self.mimetype.split('/')[0]
 

	
 
    @LazyProperty
 
    def lexer(self):
 
        """
 
        Returns pygment's lexer class. Would try to guess lexer taking file's
 
        content, name and mimetype.
 
        """
 
        from pygments import lexers
 
        try:
 
            lexer = lexers.guess_lexer_for_filename(self.name, self.content, stripnl=False)
 
        except lexers.ClassNotFound:
 
            lexer = lexers.TextLexer(stripnl=False)
 
        # returns first alias
 
        return lexer
 

	
 
    @LazyProperty
 
    def lexer_alias(self):
 
        """
 
        Returns first alias of the lexer guessed for this file.
 
        """
 
        return self.lexer.aliases[0]
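A small Pygments sketch of what the two properties above compute for an illustrative file:

from pygments import lexers

lexer = lexers.guess_lexer_for_filename('setup.py', 'import os\n', stripnl=False)
print lexer.aliases[0]  # 'python' - the value lexer_alias would report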
 

	
 
    @LazyProperty
 
    def history(self):
 
        """
 
        Returns a list of changeset for this file in which the file was changed
 
        """
 
        if self.changeset is None:
 
            raise NodeError('Unable to get changeset for this FileNode')
kallithea/lib/vcs/subprocessio.py
Show inline comments
 
"""
 
Module provides a class for wrapping communication over subprocess.Popen
 
input, output, error streams into a meaningful, non-blocking, concurrent
 
stream processor exposing the output data as an iterator fitting to be a
 
return value passed by a WSGI application to a WSGI server per PEP 3333.
 

	
 
Copyright (c) 2011  Daniel Dotsenko <dotsa[at]hotmail.com>
 

	
 
This file is part of git_http_backend.py Project.
 

	
 
git_http_backend.py Project is free software: you can redistribute it and/or
 
modify it under the terms of the GNU Lesser General Public License as
 
published by the Free Software Foundation, either version 2.1 of the License,
 
or (at your option) any later version.
 

	
 
git_http_backend.py Project is distributed in the hope that it will be useful,
 
but WITHOUT ANY WARRANTY; without even the implied warranty of
 
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 
GNU Lesser General Public License for more details.
 

	
 
You should have received a copy of the GNU Lesser General Public License
 
along with git_http_backend.py Project.
 
If not, see <http://www.gnu.org/licenses/>.
 
"""
 
import os
 
import subprocess
 
from kallithea.lib.vcs.utils.compat import deque, Event, Thread, _bytes, _bytearray
 

	
 

	
 
class StreamFeeder(Thread):
 
    """
 
    Normal writing into a pipe-like object blocks once the buffer is filled.
 
    This thread allows feeding data from a file-like object into a pipe
 
    without blocking the main thread.
 
    We close inpipe once the end of the source stream is reached.
 
    """
 

	
 
    def __init__(self, source):
 
        super(StreamFeeder, self).__init__()
 
        self.daemon = True
 
        filelike = False
 
        self.bytes = _bytes()
 
        if type(source) in (type(''), _bytes, _bytearray):  # string-like
 
            self.bytes = _bytes(source)
 
        else:  # can be either file pointer or file-like
 
            if type(source) in (int, long):  # file pointer it is
 
                ## converting file descriptor (int) stdin into file-like
 
                try:
 
                    source = os.fdopen(source, 'rb', 16384)
 
                except Exception:
 
                    pass
 
            # let's see if source is file-like by now
 
            try:
 
                filelike = source.read
 
            except Exception:
 
                pass
 
            filelike = hasattr(source, 'read')
 
        if not filelike and not self.bytes:
 
            raise TypeError("StreamFeeder's source object must be a readable "
 
                            "file-like, a file descriptor, or a string-like.")
 
        self.source = source
 
        self.readiface, self.writeiface = os.pipe()
 

	
 
    def run(self):
 
        t = self.writeiface
 
        if self.bytes:
 
            os.write(t, self.bytes)
 
        else:
 
            s = self.source
 
            b = s.read(4096)
 
            while b:
 
                os.write(t, b)
 
                b = s.read(4096)
 
        os.close(t)
 

	
 
    @property
 
    def output(self):
 
        return self.readiface
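A hedged sketch of feeding a string source into a child process without blocking, using the pipe read end exposed by output (the `cat` child is illustrative):

import subprocess

feeder = StreamFeeder('0000')        # string-like sources are written verbatim
feeder.start()
p = subprocess.Popen(['cat'], stdin=feeder.output, stdout=subprocess.PIPE)
print p.communicate()[0]             # '0000' - echoed back by cat
feeder.join()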
 

	
 

	
 
class InputStreamChunker(Thread):
 
    def __init__(self, source, target, buffer_size, chunk_size):
 

	
 
        super(InputStreamChunker, self).__init__()
 

	
 
        self.daemon = True  # die die die.
 

	
 
        self.source = source
 
        self.target = target
 
        self.chunk_count_max = int(buffer_size / chunk_size) + 1
 
        self.chunk_size = chunk_size
 

	
 
        self.data_added = Event()
 
        self.data_added.clear()
 

	
 
        self.keep_reading = Event()
 
        self.keep_reading.set()
 

	
 
        self.EOF = Event()
 
        self.EOF.clear()
 

	
 
        self.go = Event()
 
        self.go.set()
 

	
 
    def stop(self):
kallithea/model/api_key.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.api_key
 
~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
api key model for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Sep 8, 2013
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
from __future__ import with_statement
 
import time
 
import logging
 
import traceback
 
from sqlalchemy import or_
 

	
 
from kallithea.lib.utils2 import generate_api_key
 
from kallithea.model import BaseModel
 
from kallithea.model.db import UserApiKeys
 
from kallithea.model.meta import Session
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class ApiKeyModel(BaseModel):
 
    cls = UserApiKeys
 

	
 
    def create(self, user, description, lifetime=-1):
 
        """
 
        :param user: user or user_id
 
        :param description: description of ApiKey
 
        :param lifetime: expiration time in minutes
 
        """
 
        user = self._get_user(user)
 

	
 
        new_api_key = UserApiKeys()
 
        new_api_key.api_key = generate_api_key(user.username)
 
        new_api_key.user_id = user.user_id
 
        new_api_key.description = description
 
        new_api_key.expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
 
        Session().add(new_api_key)
 

	
 
        return new_api_key
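A hedged usage sketch; the user object and description are illustrative, and lifetime is interpreted in minutes by the expression above:

key = ApiKeyModel().create(user, description=u'CI deploy key', lifetime=60)
Session().commit()
print key.api_key  # the newly generated key, valid for roughly one hour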
 

	
 
    def delete(self, api_key, user=None):
 
        """
 
        Deletes given api_key, if user is set it also filters the object for
 
        deletion by given user.
 
        """
 
        api_key = UserApiKeys.query().filter(UserApiKeys.api_key == api_key)
 

	
 
        if user:
 
            user = self._get_user(user)
 
            api_key = api_key.filter(UserApiKeys.user_id == user.user_id)
 

	
 
        api_key = api_key.scalar()
 
        try:
 
            Session().delete(api_key)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def get_api_keys(self, user, show_expired=True):
 
        user = self._get_user(user)
 
        user_api_keys = UserApiKeys.query()\
 
            .filter(UserApiKeys.user_id == user.user_id)
 
        if not show_expired:
 
            user_api_keys = user_api_keys\
 
                .filter(or_(UserApiKeys.expires == -1,
 
                            UserApiKeys.expires >= time.time()))
 
        return user_api_keys
kallithea/model/db.py
Show inline comments
 
@@ -1221,97 +1221,97 @@ class Repository(Base, BaseModel):
 
            lock_time = time.time()
 
        repo.locked = [user_id, lock_time]
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def unlock(cls, repo):
 
        repo.locked = None
 
        Session().add(repo)
 
        Session().commit()
 

	
 
    @classmethod
 
    def getlock(cls, repo):
 
        return repo.locked
 

	
 
    @property
 
    def last_db_change(self):
 
        return self.updated_on
 

	
 
    @property
 
    def clone_uri_hidden(self):
 
        clone_uri = self.clone_uri
 
        if clone_uri:
 
            import urlobject
 
            url_obj = urlobject.URLObject(self.clone_uri)
 
            if url_obj.password:
 
                clone_uri = url_obj.with_password('*****')
 
        return clone_uri
 

	
 
    def clone_url(self, **override):
 
        import kallithea.lib.helpers as h
 
        qualified_home_url = h.canonical_url('home')
 

	
 
        uri_tmpl = None
 
        if 'with_id' in override:
 
            uri_tmpl = self.DEFAULT_CLONE_URI_ID
 
            del override['with_id']
 

	
 
        if 'uri_tmpl' in override:
 
            uri_tmpl = override['uri_tmpl']
 
            del override['uri_tmpl']
 

	
 
        # we didn't override our tmpl from **overrides
 
        if not uri_tmpl:
 
            uri_tmpl = self.DEFAULT_CLONE_URI
 
            try:
 
                from pylons import tmpl_context as c
 
                uri_tmpl = c.clone_uri_tmpl
 
            except Exception:
 
            except AttributeError:
 
                # in any case if we call this outside of request context,
 
                # ie, not having tmpl_context set up
 
                pass
 

	
 
        return get_clone_url(uri_tmpl=uri_tmpl,
 
                             qualifed_home_url=qualified_home_url,
 
                             repo_name=self.repo_name,
 
                             repo_id=self.repo_id, **override)
 

	
 
    def set_state(self, state):
 
        self.repo_state = state
 
        Session().add(self)
 
    #==========================================================================
 
    # SCM PROPERTIES
 
    #==========================================================================
 

	
 
    def get_changeset(self, rev=None):
 
        return get_changeset_safe(self.scm_instance, rev)
 

	
 
    def get_landing_changeset(self):
 
        """
 
        Returns landing changeset, or if that doesn't exist returns the tip
 
        """
 
        _rev_type, _rev = self.landing_rev
 
        cs = self.get_changeset(_rev)
 
        if isinstance(cs, EmptyChangeset):
 
            return self.get_changeset()
 
        return cs
 

	
 
    def update_changeset_cache(self, cs_cache=None):
 
        """
 
        Update cache of last changeset for repository, keys should be::
 

	
 
            short_id
 
            raw_id
 
            revision
 
            message
 
            date
 
            author
 

	
 
        :param cs_cache:
 
        """
 
        from kallithea.lib.vcs.backends.base import BaseChangeset
 
        if cs_cache is None:
 
            cs_cache = EmptyChangeset()
 
            # use no-cache version here
 
            scm_repo = self.scm_instance_no_cache()
 
            if scm_repo:
 
@@ -2046,138 +2046,130 @@ class CacheInvalidation(Base, BaseModel)
 
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
 
        return prefix, repo_name, suffix
 

	
 
    def get_prefix(self):
 
        """
 
        get prefix that might have been used in _get_cache_key to
 
        generate self.cache_key. Only used for informational purposes
 
        in repo_edit.html.
 
        """
 
        # prefix, repo_name, suffix
 
        return self._cache_key_partition()[0]
 

	
 
    def get_suffix(self):
 
        """
 
        get suffix that might have been used in _get_cache_key to
 
        generate self.cache_key. Only used for informational purposes
 
        in repo_edit.html.
 
        """
 
        # prefix, repo_name, suffix
 
        return self._cache_key_partition()[2]
 

	
 
    @classmethod
 
    def clear_cache(cls):
 
        """
 
        Delete all cache keys from database.
 
        Should only be run when all instances are down and all entries thus stale.
 
        """
 
        cls.query().delete()
 
        Session().commit()
 

	
 
    @classmethod
 
    def _get_cache_key(cls, key):
 
        """
 
        Wrapper for generating a unique cache key for this instance and "key".
 
        key must / will start with a repo_name which will be stored in .cache_args .
 
        """
 
        import kallithea
 
        prefix = kallithea.CONFIG.get('instance_id', '')
 
        return "%s%s" % (prefix, key)
 

	
 
    @classmethod
 
    def set_invalidate(cls, repo_name, delete=False):
 
        """
 
        Mark all caches of a repo as invalid in the database.
 
        """
 
        inv_objs = Session().query(cls).filter(cls.cache_args == repo_name).all()
 
        log.debug('for repo %s got %s invalidation objects'
 
                  % (safe_str(repo_name), inv_objs))
 
        try:
 

	
 
            for inv_obj in inv_objs:
 
                log.debug('marking %s key for invalidation based on repo_name=%s'
 
                          % (inv_obj, safe_str(repo_name)))
 
                if delete:
 
                    Session().delete(inv_obj)
 
                else:
 
                    inv_obj.cache_active = False
 
                    Session().add(inv_obj)
 
            Session().commit()
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            Session().rollback()
 

	
 
    @classmethod
 
    def test_and_set_valid(cls, repo_name, kind, valid_cache_keys=None):
 
        """
 
        Mark this cache key as active and currently cached.
 
        Return True if the existing cache registration still was valid.
 
        Return False to indicate that it had been invalidated and caches should be refreshed.
 
        """
 

	
 
        key = (repo_name + '_' + kind) if kind else repo_name
 
        cache_key = cls._get_cache_key(key)
 

	
 
        if valid_cache_keys and cache_key in valid_cache_keys:
 
            return True
 

	
 
        try:
 
            inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
 
            if not inv_obj:
 
                inv_obj = CacheInvalidation(cache_key, repo_name)
 
            if inv_obj.cache_active:
 
                return True
 
            inv_obj.cache_active = True
 
            Session().add(inv_obj)
 
            Session().commit()
 
            return False
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            Session().rollback()
 
            return False
 

	
 
    @classmethod
 
    def get_valid_cache_keys(cls):
 
        """
 
        Return opaque object with information of which caches still are valid
 
        and can be used without checking for invalidation.
 
        """
 
        return set(inv_obj.cache_key for inv_obj in cls.query().filter(cls.cache_active).all())
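A hedged sketch of the intended call pattern tying the classmethods above together; the repo name and cache kind are illustrative:

# writer side: something changed in the repository, mark its caches stale
CacheInvalidation.set_invalidate('group/some-repo')

# reader side: check (and re-arm) the cache state before serving cached data
valid = CacheInvalidation.get_valid_cache_keys()
if not CacheInvalidation.test_and_set_valid('group/some-repo', 'README',
                                            valid_cache_keys=valid):
    pass  # registration was invalidated - rebuild the cached data before use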
 

	
 

	
 
class ChangesetComment(Base, BaseModel):
 
    __tablename__ = 'changeset_comments'
 
    __table_args__ = (
 
        Index('cc_revision_idx', 'revision'),
 
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
 
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
 
    )
 
    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
 
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    revision = Column('revision', String(40), nullable=True)
 
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
 
    line_no = Column('line_no', Unicode(10), nullable=True)
 
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
 
    f_path = Column('f_path', Unicode(1000), nullable=True)
 
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
 
    text = Column('text', UnicodeText(25000), nullable=False)
 
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    author = relationship('User', lazy='joined')
 
    repo = relationship('Repository')
 
    status_change = relationship('ChangesetStatus', cascade="all, delete-orphan")
 
    pull_request = relationship('PullRequest')
 

	
 
    @classmethod
 
    def get_users(cls, revision=None, pull_request_id=None):
 
        """
 
        Returns users associated with this ChangesetComment, i.e. those
 
        who actually commented
 

	
 
        :param cls:
 
        :param revision:
 
        """
 
        q = Session().query(User)\
 
                .join(ChangesetComment.author)
 
        if revision:
 
            q = q.filter(cls.revision == revision)
 
        elif pull_request_id:
kallithea/model/user.py
Show inline comments
 
@@ -46,393 +46,353 @@ from kallithea.model.meta import Session
 
log = logging.getLogger(__name__)
 

	
 

	
 
class UserModel(BaseModel):
 
    cls = User
 

	
 
    def get(self, user_id, cache=False):
 
        user = self.sa.query(User)
 
        if cache:
 
            user = user.options(FromCache("sql_cache_short",
 
                                          "get_user_%s" % user_id))
 
        return user.get(user_id)
 

	
 
    def get_user(self, user):
 
        return self._get_user(user)
 

	
 
    def get_by_username(self, username, cache=False, case_insensitive=False):
 

	
 
        if case_insensitive:
 
            user = self.sa.query(User).filter(User.username.ilike(username))
 
        else:
 
            user = self.sa.query(User)\
 
                .filter(User.username == username)
 
        if cache:
 
            user = user.options(FromCache("sql_cache_short",
 
                                          "get_user_%s" % username))
 
        return user.scalar()
 

	
 
    def get_by_email(self, email, cache=False, case_insensitive=False):
 
        return User.get_by_email(email, case_insensitive, cache)
 

	
 
    def get_by_api_key(self, api_key, cache=False):
 
        return User.get_by_api_key(api_key, cache)
 

	
 
    def create(self, form_data, cur_user=None):
 
        if not cur_user:
 
            cur_user = getattr(get_current_authuser(), 'username', None)
 

	
 
        from kallithea.lib.hooks import log_create_user, check_allowed_create_user
 
        _fd = form_data
 
        user_data = {
 
            'username': _fd['username'], 'password': _fd['password'],
 
            'email': _fd['email'], 'firstname': _fd['firstname'], 'lastname': _fd['lastname'],
 
            'active': _fd['active'], 'admin': False
 
        }
 
        # raises UserCreationError if it's not allowed
 
        check_allowed_create_user(user_data, cur_user)
 
        from kallithea.lib.auth import get_crypt_password
 
        try:
 

	
 
            new_user = User()
 
            for k, v in form_data.items():
 
                if k == 'password':
 
                    v = get_crypt_password(v)
 
                if k == 'firstname':
 
                    k = 'name'
 
                setattr(new_user, k, v)
 

	
 
            new_user.api_key = generate_api_key(form_data['username'])
 
            self.sa.add(new_user)
 

	
 
            log_create_user(new_user.get_dict(), cur_user)
 
            return new_user
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def create_or_update(self, username, password, email, firstname='',
 
                         lastname='', active=True, admin=False,
 
                         extern_type=None, extern_name=None, cur_user=None):
 
        """
 
        Creates a new instance if not found, or updates current one
 

	
 
        :param username:
 
        :param password:
 
        :param email:
 
        :param active:
 
        :param firstname:
 
        :param lastname:
 
        :param active:
 
        :param admin:
 
        :param extern_name:
 
        :param extern_type:
 
        :param cur_user:
 
        """
 
        if not cur_user:
 
            cur_user = getattr(get_current_authuser(), 'username', None)
 

	
 
        from kallithea.lib.auth import get_crypt_password, check_password
 
        from kallithea.lib.hooks import log_create_user, check_allowed_create_user
 
        user_data = {
 
            'username': username, 'password': password,
 
            'email': email, 'firstname': firstname, 'lastname': lastname,
 
            'active': active, 'admin': admin
 
        }
 
        # raises UserCreationError if it's not allowed
 
        check_allowed_create_user(user_data, cur_user)
 

	
 
        log.debug('Checking for %s account in Kallithea database' % username)
 
        user = User.get_by_username(username, case_insensitive=True)
 
        if user is None:
 
            log.debug('creating new user %s' % username)
 
            new_user = User()
 
            edit = False
 
        else:
 
            log.debug('updating user %s' % username)
 
            new_user = user
 
            edit = True
 

	
 
        try:
 
            new_user.username = username
 
            new_user.admin = admin
 
            new_user.email = email
 
            new_user.active = active
 
            new_user.extern_name = safe_unicode(extern_name) if extern_name else None
 
            new_user.extern_type = safe_unicode(extern_type) if extern_type else None
 
            new_user.name = firstname
 
            new_user.lastname = lastname
 

	
 
            if not edit:
 
                new_user.api_key = generate_api_key(username)
 

	
 
            # set password only if creating a user or the password is changed
 
            password_change = new_user.password and not check_password(password,
 
                                                            new_user.password)
 
            if not edit or password_change:
 
                reason = 'new password' if edit else 'new user'
 
                log.debug('Updating password reason=>%s' % (reason,))
 
                new_user.password = get_crypt_password(password) if password else None
 

	
 
            self.sa.add(new_user)
 

	
 
            if not edit:
 
                log_create_user(new_user.get_dict(), cur_user)
 
            return new_user
 
        except (DatabaseError,):
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def create_registration(self, form_data):
 
        from kallithea.model.notification import NotificationModel
 
        import kallithea.lib.helpers as h
 

	
 
        try:
 
            form_data['admin'] = False
 
            form_data['extern_name'] = EXTERN_TYPE_INTERNAL
 
            form_data['extern_type'] = EXTERN_TYPE_INTERNAL
 
            new_user = self.create(form_data)
 

	
 
            self.sa.add(new_user)
 
            self.sa.flush()
 

	
 
            # notification to admins
 
            subject = _('New user registration')
 
            body = ('New user registration\n'
 
                    '---------------------\n'
 
                    '- Username: %s\n'
 
                    '- Full Name: %s\n'
 
                    '- Email: %s\n')
 
            body = body % (new_user.username, new_user.full_name, new_user.email)
 
            edit_url = h.canonical_url('edit_user', id=new_user.user_id)
 
            email_kwargs = {'registered_user_url': edit_url, 'new_username': new_user.username}
 
            NotificationModel().create(created_by=new_user, subject=subject,
 
                                       body=body, recipients=None,
 
                                       type_=Notification.TYPE_REGISTRATION,
 
                                       email_kwargs=email_kwargs)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def update(self, user_id, form_data, skip_attrs=[]):
 
        from kallithea.lib.auth import get_crypt_password
 
        try:
 

	
 
            user = self.get(user_id, cache=False)
 
            if user.username == User.DEFAULT_USER:
 
                raise DefaultUserException(
 
                                _("You can't Edit this user since it's "
 
                                  "crucial for entire application"))
 

	
 
            for k, v in form_data.items():
 
                if k in skip_attrs:
 
                    continue
 
                if k == 'new_password' and v:
 
                    user.password = get_crypt_password(v)
 
                else:
 
                    # old legacy thing orm models store firstname as name,
 
                    # need proper refactor to username
 
                    if k == 'firstname':
 
                        k = 'name'
 
                    setattr(user, k, v)
 
            self.sa.add(user)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def update_user(self, user, **kwargs):
 
        from kallithea.lib.auth import get_crypt_password
 
        try:
 

	
 
            user = self._get_user(user)
 
            if user.username == User.DEFAULT_USER:
 
                raise DefaultUserException(
 
                    _("You can't Edit this user since it's"
 
                      " crucial for entire application")
 
                )
 

	
 
            for k, v in kwargs.items():
 
                if k == 'password' and v:
 
                    v = get_crypt_password(v)
 

	
 
                setattr(user, k, v)
 
            self.sa.add(user)
 
            return user
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def delete(self, user, cur_user=None):
 
        if not cur_user:
 
            cur_user = getattr(get_current_authuser(), 'username', None)
 
        user = self._get_user(user)
 

	
 
        try:
 
            if user.username == User.DEFAULT_USER:
 
                raise DefaultUserException(
 
                    _(u"You can't remove this user since it's"
 
                      " crucial for entire application")
 
                )
 
            if user.repositories:
 
                repos = [x.repo_name for x in user.repositories]
 
                raise UserOwnsReposException(
 
                    _(u'user "%s" still owns %s repositories and cannot be '
 
                      'removed. Switch owners or remove those repositories. %s')
 
                    % (user.username, len(repos), ', '.join(repos))
 
                )
 
            self.sa.delete(user)
 

	
 
            from kallithea.lib.hooks import log_delete_user
 
            log_delete_user(user.get_dict(), cur_user)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def reset_password_link(self, data):
 
        from kallithea.lib.celerylib import tasks, run_task
 
        from kallithea.model.notification import EmailNotificationModel
 
        import kallithea.lib.helpers as h
 

	
 
        user_email = data['email']
 
        try:
 
            user = User.get_by_email(user_email)
 
            if user:
 
                log.debug('password reset user found %s' % user)
 
                link = h.canonical_url('reset_password_confirmation', key=user.api_key)
 
                reg_type = EmailNotificationModel.TYPE_PASSWORD_RESET
 
                body = EmailNotificationModel().get_email_tmpl(reg_type,
 
                                                               'txt',
 
                                                               user=user.short_contact,
 
                                                               reset_url=link)
 
                html_body = EmailNotificationModel().get_email_tmpl(reg_type,
 
                                                               'html',
 
                                                               user=user.short_contact,
 
                                                               reset_url=link)
 
                log.debug('sending email')
 
                run_task(tasks.send_email, [user_email],
 
                         _("Password reset link"), body, html_body)
 
                log.info('send new password mail to %s' % user_email)
 
            else:
 
                log.debug("password reset email %s not found" % user_email)
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            return False
 

	
 
        return True
 

	
 
    def reset_password(self, data):
 
        from kallithea.lib.celerylib import tasks, run_task
 
        from kallithea.lib import auth
 
        user_email = data['email']
 
        pre_db = True
 
        try:
 
            user = User.get_by_email(user_email)
 
            new_passwd = auth.PasswordGenerator().gen_password(8,
 
                            auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
 
            if user:
 
                user.password = auth.get_crypt_password(new_passwd)
 
                Session().add(user)
 
                Session().commit()
 
                log.info('change password for %s' % user_email)
 
            if new_passwd is None:
 
                raise Exception('unable to generate new password')
 

	
 
            pre_db = False
 
            run_task(tasks.send_email, [user_email],
 
                     _('Your new password'),
 
                     _('Your new Kallithea password:%s') % (new_passwd,))
 
            log.info('send new password mail to %s' % user_email)
 

	
 
        except Exception:
 
            log.error('Failed to update user password')
 
            log.error(traceback.format_exc())
 
            if pre_db:
 
                # we roll back only if the local db work fails; once execution
 
                # reaches run_task, a rollback would no longer help
 
                Session().rollback()
 

	
 
        return True
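    # Illustrative sketch, not part of this changeset: the intent of the pre_db
    # flag above, shown as a generic pattern. do_db_work and send_mail are
    # hypothetical placeholders, not Kallithea functions.
    #
    #     pre_db = True
    #     try:
    #         do_db_work()
    #         Session().commit()
    #         pre_db = False    # from here on a rollback cannot undo anything
    #         send_mail()       # a failure here must not roll back the commit
    #     except Exception:
    #         log.error(traceback.format_exc())
    #         if pre_db:
    #             Session().rollback()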
 

	
 
    def fill_data(self, auth_user, user_id=None, api_key=None, username=None):
 
        """
 
        Fetches auth_user by user_id, or api_key if present.
 
        Fills auth_user attributes with those taken from the database.
 
        Sets is_authenticated to False if the lookup fails; returns False when
 
        no matching active user is present in the database.
 

	
 
        :param auth_user: instance of user to set attributes
 
        :param user_id: user id to fetch by
 
        :param api_key: api key to fetch by
 
        :param username: username to fetch by
 
        """
 
        if user_id is None and api_key is None and username is None:
 
            raise Exception('You need to pass user_id, api_key or username')
 

	
 
        try:
 
            dbuser = None
 
            if user_id:
 
                dbuser = self.get(user_id)
 
            elif api_key:
 
                dbuser = self.get_by_api_key(api_key)
 
            elif username:
 
                dbuser = self.get_by_username(username)
 

	
 
            if dbuser is not None and dbuser.active:
 
                log.debug('filling %s data' % dbuser)
 
                for k, v in dbuser.get_dict().iteritems():
 
                    if k not in ['api_keys', 'permissions']:
 
                        setattr(auth_user, k, v)
 
            return True
 
            else:
 
                return False
 

	
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            auth_user.is_authenticated = False
 
            return False
 

	
 
        return True
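    # Illustrative usage sketch, not part of this changeset: the lookup above
    # prefers user_id, then api_key, then username. FakeAuthUser is a made-up
    # stand-in for the real AuthUser object.
    #
    #     class FakeAuthUser(object):
    #         is_authenticated = True
    #
    #     auth_user = FakeAuthUser()
    #     if not UserModel().fill_data(auth_user, username='john.doe'):
    #         # unknown, inactive or failing lookup: treat as anonymous
    #         auth_user.is_authenticated = False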
 

	
 
    def has_perm(self, user, perm):
 
        perm = self._get_perm(perm)
 
        user = self._get_user(user)
 

	
 
        return UserToPerm.query().filter(UserToPerm.user == user)\
 
            .filter(UserToPerm.permission == perm).scalar() is not None
 

	
 
    def grant_perm(self, user, perm):
 
        """
 
        Grant user global permissions
 

	
 
        :param user:
 
        :param perm:
 
        """
 
        user = self._get_user(user)
 
        perm = self._get_perm(perm)
 
        # if this permission is already granted skip it
 
        _perm = UserToPerm.query()\
 
            .filter(UserToPerm.user == user)\
 
            .filter(UserToPerm.permission == perm)\
 
            .scalar()
 
        if _perm:
 
            return
 
        new = UserToPerm()
 
        new.user = user
 
        new.permission = perm
 
        self.sa.add(new)
 
        return new
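    # Illustrative usage sketch, not part of this changeset: granting and then
    # checking a global permission. 'hg.create.repository' is an existing
    # Kallithea permission key (it is also used in the tests further down);
    # the username is made up.
    #
    #     model = UserModel()
    #     model.grant_perm('john.doe', 'hg.create.repository')
    #     Session().commit()
    #     assert model.has_perm('john.doe', 'hg.create.repository')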
 

	
 
    def revoke_perm(self, user, perm):
 
        """
 
        Revoke users global permissions
 

	
 
        :param user:
 
        :param perm:
 
        """
 
        user = self._get_user(user)
 
        perm = self._get_perm(perm)
 

	
 
        obj = UserToPerm.query()\
 
                .filter(UserToPerm.user == user)\
 
                .filter(UserToPerm.permission == perm)\
 
                .scalar()
 
        if obj:
 
            self.sa.delete(obj)
 

	
 
    def add_extra_email(self, user, email):
 
        """
kallithea/model/validators.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
Set of generic validators
 
"""
 

	
 
import os
 
import re
 
import formencode
 
import logging
 
from collections import defaultdict
 
from pylons.i18n.translation import _
 
from webhelpers.pylonslib.secure_form import authentication_token
 
import sqlalchemy
 

	
 
from formencode.validators import (
 
    UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set,
 
    NotEmpty, IPAddress, CIDR, String, FancyValidator
 
)
 
from kallithea.lib.compat import OrderedSet
 
from kallithea.lib import ipaddr
 
from kallithea.lib.utils import repo_name_slug
 
from kallithea.lib.utils2 import safe_int, str2bool, aslist
 
from kallithea.model.db import RepoGroup, Repository, UserGroup, User,\
 
    ChangesetStatus
 
from kallithea.lib.exceptions import LdapImportError
 
from kallithea.config.routing import ADMIN_PREFIX
 
from kallithea.lib.auth import HasRepoGroupPermissionAny, HasPermissionAny
 

	
 
# silence warnings and pylint
 
UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set, \
 
    NotEmpty, IPAddress, CIDR, String, FancyValidator
 

	
 
log = logging.getLogger(__name__)
 

	
 
class _Missing(object):
 
    pass
 

	
 
Missing = _Missing()
 

	
 

	
 
class StateObj(object):
 
    """
 
    this is needed to translate the messages using _() in validators
 
    """
 
    _ = staticmethod(_)
 

	
 

	
 
def M(self, key, state=None, **kwargs):
 
    """
 
    returns string from self.message based on given key,
 
    passed kw params are used to substitute %(named)s params inside
 
    translated strings
 

	
 
    :param key:
 
    :param state:
 
    """
 
    if state is None:
 
        state = StateObj()
 
    else:
 
        state._ = staticmethod(_)
 
    #inject validator into state object
 
@@ -97,97 +98,97 @@ def UniqueListFromString():
 
def ValidUsername(edit=False, old_data={}):
 
    class _validator(formencode.validators.FancyValidator):
 
        messages = {
 
            'username_exists': _(u'Username "%(username)s" already exists'),
 
            'system_invalid_username':
 
                _(u'Username "%(username)s" is forbidden'),
 
            'invalid_username':
 
                _(u'Username may only contain alphanumeric characters '
 
                    'underscores, periods or dashes and must begin with '
 
                    'alphanumeric character or underscore')
 
        }
 

	
 
        def validate_python(self, value, state):
 
            if value in ['default', 'new_user']:
 
                msg = M(self, 'system_invalid_username', state, username=value)
 
                raise formencode.Invalid(msg, value, state)
 
            #check if user is unique
 
            old_un = None
 
            if edit:
 
                old_un = User.get(old_data.get('user_id')).username
 

	
 
            if old_un != value or not edit:
 
                if User.get_by_username(value, case_insensitive=True):
 
                    msg = M(self, 'username_exists', state, username=value)
 
                    raise formencode.Invalid(msg, value, state)
 

	
 
            if re.match(r'^[a-zA-Z0-9\_]{1}[a-zA-Z0-9\-\_\.]*$', value) is None:
 
                msg = M(self, 'invalid_username', state)
 
                raise formencode.Invalid(msg, value, state)
 
    return _validator
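# Illustrative sketch, not part of the changeset: the username rule enforced by
# the validator above, exercised directly with the re module imported at the
# top of this file; the sample names are made up. The first character must be
# alphanumeric or an underscore, later characters may also be dashes or
# periods.
_username_re = r'^[a-zA-Z0-9\_]{1}[a-zA-Z0-9\-\_\.]*$'
for _name, _ok in [('john.doe', True), ('_service-account', True),
                   ('-starts-with-dash', False), ('white space', False)]:
    assert bool(re.match(_username_re, _name)) == _ok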
 

	
 

	
 
def ValidRegex(msg=None):
 
    class _validator(formencode.validators.Regex):
 
        messages = dict(invalid=msg or _('The input is not valid'))
 
    return _validator
 

	
 

	
 
def ValidRepoUser():
 
    class _validator(formencode.validators.FancyValidator):
 
        messages = {
 
            'invalid_username': _(u'Username %(username)s is not valid')
 
        }
 

	
 
        def validate_python(self, value, state):
 
            try:
 
                User.query().filter(User.active == True)\
 
                    .filter(User.username == value).one()
 
            except Exception:
 
            except sqlalchemy.exc.InvalidRequestError: # NoResultFound/MultipleResultsFound
 
                msg = M(self, 'invalid_username', state, username=value)
 
                raise formencode.Invalid(msg, value, state,
 
                    error_dict=dict(username=msg)
 
                )
 

	
 
    return _validator
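# Illustrative sketch, not part of the changeset: the general pattern this
# changeset applies -- catch the narrowest exceptions Query.one() can raise
# instead of a bare Exception. NoResultFound and MultipleResultsFound both
# derive from sqlalchemy.exc.InvalidRequestError, so the single except clause
# above covers the "no such user" and "ambiguous user" cases without hiding
# unrelated bugs. _lookup_active_user is a hypothetical helper mirroring the
# query in ValidRepoUser.
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound

def _lookup_active_user(sa_session, username):
    try:
        return sa_session.query(User)\
            .filter(User.active == True)\
            .filter(User.username == username).one()
    except (NoResultFound, MultipleResultsFound):
        return None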
 

	
 

	
 
def ValidUserGroup(edit=False, old_data={}):
 
    class _validator(formencode.validators.FancyValidator):
 
        messages = {
 
            'invalid_group': _(u'Invalid user group name'),
 
            'group_exist': _(u'User group "%(usergroup)s" already exists'),
 
            'invalid_usergroup_name':
 
                _(u'user group name may only contain alphanumeric '
 
                  'characters underscores, periods or dashes and must begin '
 
                  'with alphanumeric character')
 
        }
 

	
 
        def validate_python(self, value, state):
 
            if value in ['default']:
 
                msg = M(self, 'invalid_group', state)
 
                raise formencode.Invalid(msg, value, state,
 
                    error_dict=dict(users_group_name=msg)
 
                )
 
            #check if group is unique
 
            old_ugname = None
 
            if edit:
 
                old_id = old_data.get('users_group_id')
 
                old_ugname = UserGroup.get(old_id).users_group_name
 

	
 
            if old_ugname != value or not edit:
 
                is_existing_group = UserGroup.get_by_group_name(value,
 
                                                        case_insensitive=True)
 
                if is_existing_group:
 
                    msg = M(self, 'group_exist', state, usergroup=value)
 
                    raise formencode.Invalid(msg, value, state,
 
                        error_dict=dict(users_group_name=msg)
 
                    )
 

	
 
            if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None:
 
                msg = M(self, 'invalid_usergroup_name', state)
 
                raise formencode.Invalid(msg, value, state,
 
                    error_dict=dict(users_group_name=msg)
 
                )
 

	
 
    return _validator
 

	
kallithea/tests/functional/test_admin_repos.py
 
@@ -34,453 +34,453 @@ class _BaseTest(TestController):
 
    REPO_TYPE = None
 
    NEW_REPO = None
 
    OTHER_TYPE_REPO = None
 
    OTHER_TYPE = None
 

	
 
    @classmethod
 
    def setup_class(cls):
 
        pass
 

	
 
    @classmethod
 
    def teardown_class(cls):
 
        pass
 

	
 
    def test_index(self):
 
        self.log_user()
 
        response = self.app.get(url('repos'))
 

	
 
    def test_create(self):
 
        self.log_user()
 
        repo_name = self.NEW_REPO
 
        description = 'description for newly created repo'
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description))
 
        ## run the check page that triggers the flash message
 
        response = self.app.get(url('repo_check_home', repo_name=repo_name))
 
        self.assertEqual(response.json, {u'result': True})
 
        self.checkSessionFlash(response,
 
                               'Created repository <a href="/%s">%s</a>'
 
                               % (repo_name, repo_name))
 

	
 
        # test if the repo was created in the database
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name).one()
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name)
 
        self.assertEqual(new_repo.description, description)
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(url('summary_home', repo_name=repo_name))
 
        response.mustcontain(repo_name)
 
        response.mustcontain(self.REPO_TYPE)
 

	
 
        # test if the repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name))
 
        except Exception:
 
        except vcs.exceptions.VCSError:
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
        RepoModel().delete(repo_name)
 
        Session().commit()
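    # Illustrative helper sketch, not part of the changeset: the filesystem
    # check these tests repeat, catching the specific VCSError the vcs layer
    # raises for a missing repository instead of a bare Exception. It reuses
    # the vcs, os and TESTS_TMP_PATH names already used elsewhere in this file.
    def _repo_exists_on_disk(self, repo_name):
        try:
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name))
            return True
        except vcs.exceptions.VCSError:
            return False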
 

	
 
    def test_create_non_ascii(self):
 
        self.log_user()
 
        non_ascii = "ąęł"
 
        repo_name = "%s%s" % (self.NEW_REPO, non_ascii)
 
        repo_name_unicode = repo_name.decode('utf8')
 
        description = 'description for newly created repo' + non_ascii
 
        description_unicode = description.decode('utf8')
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description))
 
        ## run the check page that triggers the flash message
 
        response = self.app.get(url('repo_check_home', repo_name=repo_name))
 
        self.assertEqual(response.json, {u'result': True})
 
        self.checkSessionFlash(response,
 
                               u'Created repository <a href="/%s">%s</a>'
 
                               % (urllib.quote(repo_name), repo_name_unicode))
 
        # test if the repo was created in the database
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name_unicode).one()
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name_unicode)
 
        self.assertEqual(new_repo.description, description_unicode)
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(url('summary_home', repo_name=repo_name))
 
        response.mustcontain(repo_name)
 
        response.mustcontain(self.REPO_TYPE)
 

	
 
        # test if the repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name))
 
        except Exception:
 
        except vcs.exceptions.VCSError:
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
    def test_create_in_group(self):
 
        self.log_user()
 

	
 
        ## create GROUP
 
        group_name = 'sometest_%s' % self.REPO_TYPE
 
        gr = RepoGroupModel().create(group_name=group_name,
 
                                     group_description='test',
 
                                     owner=TEST_USER_ADMIN_LOGIN)
 
        Session().commit()
 

	
 
        repo_name = 'ingroup'
 
        repo_name_full = RepoGroup.url_sep().join([group_name, repo_name])
 
        description = 'description for newly created repo'
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description,
 
                                                repo_group=gr.group_id,))
 
        ## run the check page that triggers the flash message
 
        response = self.app.get(url('repo_check_home', repo_name=repo_name_full))
 
        self.assertEqual(response.json, {u'result': True})
 
        self.checkSessionFlash(response,
 
                               'Created repository <a href="/%s">%s</a>'
 
                               % (repo_name_full, repo_name_full))
 
        # test if the repo was created in the database
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name_full).one()
 
        new_repo_id = new_repo.repo_id
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name_full)
 
        self.assertEqual(new_repo.description, description)
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(url('summary_home', repo_name=repo_name_full))
 
        response.mustcontain(repo_name_full)
 
        response.mustcontain(self.REPO_TYPE)
 

	
 
        inherited_perms = UserRepoToPerm.query()\
 
            .filter(UserRepoToPerm.repository_id == new_repo_id).all()
 
        self.assertEqual(len(inherited_perms), 1)
 

	
 
        # test if the repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name_full))
 
        except Exception:
 
        except vcs.exceptions.VCSError:
 
            RepoGroupModel().delete(group_name)
 
            Session().commit()
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
        RepoModel().delete(repo_name_full)
 
        RepoGroupModel().delete(group_name)
 
        Session().commit()
 

	
 
    def test_create_in_group_without_needed_permissions(self):
 
        usr = self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
 
        # revoke
 
        user_model = UserModel()
 
        # disable fork and create on default user
 
        user_model.revoke_perm(User.DEFAULT_USER, 'hg.create.repository')
 
        user_model.grant_perm(User.DEFAULT_USER, 'hg.create.none')
 
        user_model.revoke_perm(User.DEFAULT_USER, 'hg.fork.repository')
 
        user_model.grant_perm(User.DEFAULT_USER, 'hg.fork.none')
 

	
 
        # disable on regular user
 
        user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.repository')
 
        user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.create.none')
 
        user_model.revoke_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.repository')
 
        user_model.grant_perm(TEST_USER_REGULAR_LOGIN, 'hg.fork.none')
 
        Session().commit()
 

	
 
        ## create GROUP
 
        group_name = 'reg_sometest_%s' % self.REPO_TYPE
 
        gr = RepoGroupModel().create(group_name=group_name,
 
                                     group_description='test',
 
                                     owner=TEST_USER_ADMIN_LOGIN)
 
        Session().commit()
 

	
 
        group_name_allowed = 'reg_sometest_allowed_%s' % self.REPO_TYPE
 
        gr_allowed = RepoGroupModel().create(group_name=group_name_allowed,
 
                                     group_description='test',
 
                                     owner=TEST_USER_REGULAR_LOGIN)
 
        Session().commit()
 

	
 
        repo_name = 'ingroup'
 
        repo_name_full = RepoGroup.url_sep().join([group_name, repo_name])
 
        description = 'description for newly created repo'
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description,
 
                                                repo_group=gr.group_id,))
 

	
 
        response.mustcontain('Invalid value')
 

	
 
        # user is allowed to create in this group
 
        repo_name = 'ingroup'
 
        repo_name_full = RepoGroup.url_sep().join([group_name_allowed, repo_name])
 
        description = 'description for newly created repo'
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description,
 
                                                repo_group=gr_allowed.group_id,))
 

	
 
        ## run the check page that triggers the flash message
 
        response = self.app.get(url('repo_check_home', repo_name=repo_name_full))
 
        self.assertEqual(response.json, {u'result': True})
 
        self.checkSessionFlash(response,
 
                               'Created repository <a href="/%s">%s</a>'
 
                               % (repo_name_full, repo_name_full))
 
        # test if the repo was created in the database
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name_full).one()
 
        new_repo_id = new_repo.repo_id
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name_full)
 
        self.assertEqual(new_repo.description, description)
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(url('summary_home', repo_name=repo_name_full))
 
        response.mustcontain(repo_name_full)
 
        response.mustcontain(self.REPO_TYPE)
 

	
 
        inherited_perms = UserRepoToPerm.query()\
 
            .filter(UserRepoToPerm.repository_id == new_repo_id).all()
 
        self.assertEqual(len(inherited_perms), 1)
 

	
 
        # test if the repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name_full))
 
        except Exception:
 
        except vcs.exceptions.VCSError:
 
            RepoGroupModel().delete(group_name)
 
            Session().commit()
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
        RepoModel().delete(repo_name_full)
 
        RepoGroupModel().delete(group_name)
 
        RepoGroupModel().delete(group_name_allowed)
 
        Session().commit()
 

	
 
    def test_create_in_group_inherit_permissions(self):
 
        self.log_user()
 

	
 
        ## create GROUP
 
        group_name = 'sometest_%s' % self.REPO_TYPE
 
        gr = RepoGroupModel().create(group_name=group_name,
 
                                     group_description='test',
 
                                     owner=TEST_USER_ADMIN_LOGIN)
 
        perm = Permission.get_by_key('repository.write')
 
        RepoGroupModel().grant_user_permission(gr, TEST_USER_REGULAR_LOGIN, perm)
 

	
 
        ## add repo permissions
 
        Session().commit()
 

	
 
        repo_name = 'ingroup_inherited_%s' % self.REPO_TYPE
 
        repo_name_full = RepoGroup.url_sep().join([group_name, repo_name])
 
        description = 'description for newly created repo'
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description,
 
                                                repo_group=gr.group_id,
 
                                                repo_copy_permissions=True))
 

	
 
        ## run the check page that triggers the flash message
 
        response = self.app.get(url('repo_check_home', repo_name=repo_name_full))
 
        self.checkSessionFlash(response,
 
                               'Created repository <a href="/%s">%s</a>'
 
                               % (repo_name_full, repo_name_full))
 
        # test if the repo was created in the database
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name_full).one()
 
        new_repo_id = new_repo.repo_id
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name_full)
 
        self.assertEqual(new_repo.description, description)
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(url('summary_home', repo_name=repo_name_full))
 
        response.mustcontain(repo_name_full)
 
        response.mustcontain(self.REPO_TYPE)
 

	
 
        # test if the repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name_full))
 
        except Exception:
 
        except vcs.exceptions.VCSError:
 
            RepoGroupModel().delete(group_name)
 
            Session().commit()
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
        # check if inherited permissions are applied
 
        inherited_perms = UserRepoToPerm.query()\
 
            .filter(UserRepoToPerm.repository_id == new_repo_id).all()
 
        self.assertEqual(len(inherited_perms), 2)
 

	
 
        self.assertTrue(TEST_USER_REGULAR_LOGIN in [x.user.username
 
                                                    for x in inherited_perms])
 
        self.assertTrue('repository.write' in [x.permission.permission_name
 
                                               for x in inherited_perms])
 

	
 
        RepoModel().delete(repo_name_full)
 
        RepoGroupModel().delete(group_name)
 
        Session().commit()
 

	
 
    def test_create_remote_repo_wrong_clone_uri(self):
 
        self.log_user()
 
        repo_name = self.NEW_REPO
 
        description = 'description for newly created repo'
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description,
 
                                                clone_uri='http://127.0.0.1/repo'))
 
        response.mustcontain('invalid clone url')
 

	
 

	
 
    def test_create_remote_repo_wrong_clone_uri_hg_svn(self):
 
        self.log_user()
 
        repo_name = self.NEW_REPO
 
        description = 'description for newly created repo'
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description,
 
                                                clone_uri='svn+http://127.0.0.1/repo'))
 
        response.mustcontain('invalid clone url')
 

	
 

	
 
    def test_delete(self):
 
        self.log_user()
 
        repo_name = 'vcs_test_new_to_delete_%s' % self.REPO_TYPE
 
        description = 'description for newly created repo'
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_name=repo_name,
 
                                                repo_description=description))
 
        ## run the check page that triggers the flash message
 
        response = self.app.get(url('repo_check_home', repo_name=repo_name))
 
        self.checkSessionFlash(response,
 
                               'Created repository <a href="/%s">%s</a>'
 
                               % (repo_name, repo_name))
 
        # test if the repo was created in the database
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name).one()
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name)
 
        self.assertEqual(new_repo.description, description)
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(url('summary_home', repo_name=repo_name))
 
        response.mustcontain(repo_name)
 
        response.mustcontain(self.REPO_TYPE)
 

	
 
        # test if the repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name))
 
        except Exception:
 
        except vcs.exceptions.VCSError:
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
        response = self.app.delete(url('repo', repo_name=repo_name))
 

	
 
        self.checkSessionFlash(response, 'Deleted repository %s' % (repo_name))
 

	
 
        response.follow()
 

	
 
        #check if repo was deleted from db
 
        deleted_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name).scalar()
 

	
 
        self.assertEqual(deleted_repo, None)
 

	
 
        self.assertEqual(os.path.isdir(os.path.join(TESTS_TMP_PATH, repo_name)),
 
                                  False)
 

	
 
    def test_delete_non_ascii(self):
 
        self.log_user()
 
        non_ascii = "ąęł"
 
        repo_name = "%s%s" % (self.NEW_REPO, non_ascii)
 
        repo_name_unicode = repo_name.decode('utf8')
 
        description = 'description for newly created repo' + non_ascii
 
        description_unicode = description.decode('utf8')
 
        response = self.app.post(url('repos'),
 
                        fixture._get_repo_create_params(repo_private=False,
 
                                                repo_name=repo_name,
 
                                                repo_type=self.REPO_TYPE,
 
                                                repo_description=description))
 
        ## run the check page that triggers the flash message
 
        response = self.app.get(url('repo_check_home', repo_name=repo_name))
 
        self.assertEqual(response.json, {u'result': True})
 
        self.checkSessionFlash(response,
 
                               u'Created repository <a href="/%s">%s</a>'
 
                               % (urllib.quote(repo_name), repo_name_unicode))
 
        # test if the repo was created in the database
 
        new_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name_unicode).one()
 

	
 
        self.assertEqual(new_repo.repo_name, repo_name_unicode)
 
        self.assertEqual(new_repo.description, description_unicode)
 

	
 
        # test if the repository is visible in the list ?
 
        response = self.app.get(url('summary_home', repo_name=repo_name))
 
        response.mustcontain(repo_name)
 
        response.mustcontain(self.REPO_TYPE)
 

	
 
        # test if the repository was created on filesystem
 
        try:
 
            vcs.get_repo(os.path.join(TESTS_TMP_PATH, repo_name))
 
        except Exception:
 
        except vcs.exceptions.VCSError:
 
            self.fail('no repo %s in filesystem' % repo_name)
 

	
 
        response = self.app.delete(url('repo', repo_name=repo_name))
 
        self.checkSessionFlash(response, 'Deleted repository %s' % (repo_name_unicode))
 
        response.follow()
 

	
 
        #check if repo was deleted from db
 
        deleted_repo = Session().query(Repository)\
 
            .filter(Repository.repo_name == repo_name_unicode).scalar()
 

	
 
        self.assertEqual(deleted_repo, None)
 

	
 
        self.assertEqual(os.path.isdir(os.path.join(TESTS_TMP_PATH, repo_name)),
 
                                  False)
 

	
 
    def test_delete_repo_with_group(self):
 
        #TODO:
 
        pass
 

	
 
    def test_delete_browser_fakeout(self):
 
        response = self.app.post(url('repo', repo_name=self.REPO),
 
                                 params=dict(_method='delete'))
 

	
 
    def test_show(self):
 
        self.log_user()
 
        response = self.app.get(url('repo', repo_name=self.REPO))
 

	
 
    def test_edit(self):
 
        response = self.app.get(url('edit_repo', repo_name=self.REPO))
 

	
 
    def test_set_private_flag_sets_default_to_none(self):
 
        self.log_user()
 
        #initially repository perm should be read
 
        perm = _get_permission_for_user(user='default', repo=self.REPO)
 
        self.assertTrue(len(perm), 1)
 
        self.assertEqual(perm[0].permission.permission_name, 'repository.read')
 
        self.assertEqual(Repository.get_by_repo_name(self.REPO).private, False)
 

	
 
        response = self.app.put(url('repo', repo_name=self.REPO),
 
                        fixture._get_repo_create_params(repo_private=1,
 
                                                repo_name=self.REPO,
 
                                                repo_type=self.REPO_TYPE,
 
                                                user=TEST_USER_ADMIN_LOGIN))
 
        self.checkSessionFlash(response,
 
                               msg='Repository %s updated successfully' % (self.REPO))
 
        self.assertEqual(Repository.get_by_repo_name(self.REPO).private, True)
 

	
 
        #now the repo default permission should be None
kallithea/tests/scripts/test_concurency.py
 
@@ -155,68 +155,68 @@ def get_anonymous_access():
 
    sa = get_session()
 
    return sa.query(User).filter(User.username == 'default').one().active
 

	
 

	
 
#==============================================================================
 
# TESTS
 
#==============================================================================
 
def test_clone_with_credentials(no_errors=False, repo=HG_REPO, method=METHOD,
 
                                seq=None, backend='hg'):
 
    cwd = path = jn(TESTS_TMP_PATH, repo)
 

	
 
    if seq is None:
 
        seq = _RandomNameSequence().next()
 

	
 
    try:
 
        shutil.rmtree(path, ignore_errors=True)
 
        os.makedirs(path)
 
        #print 'made dirs %s' % jn(path)
 
    except OSError:
 
        raise
 

	
 
    clone_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s' % \
 
                  {'user': USER,
 
                   'pass': PASS,
 
                   'host': HOST,
 
                   'cloned_repo': repo, }
 

	
 
    dest = path + seq
 
    if method == 'pull':
 
        stdout, stderr = Command(cwd).execute(backend, method, '--cwd', dest, clone_url)
 
    else:
 
        stdout, stderr = Command(cwd).execute(backend, method, clone_url, dest)
 
        print stdout,'sdasdsadsa'
 
        if not no_errors:
 
            if backend == 'hg':
 
                assert """adding file changes""" in stdout, 'no messages about cloning'
 
                assert """abort""" not in stderr , 'got error from clone'
 
            elif backend == 'git':
 
                assert """Cloning into""" in stdout, 'no messages about cloning'
 

	
 
if __name__ == '__main__':
 
    try:
 
        create_test_user(force=False)
 
        seq = None
 
        import time
 

	
 
        try:
 
            METHOD = sys.argv[3]
 
        except Exception:
 
        except IndexError:
 
            pass
 

	
 
        try:
 
            backend = sys.argv[4]
 
        except Exception:
 
        except IndexError:
 
            backend = 'hg'
 

	
 
        if METHOD == 'pull':
 
            seq = _RandomNameSequence().next()
 
            test_clone_with_credentials(repo=sys.argv[1], method='clone',
 
                                        seq=seq, backend=backend)
 
        s = time.time()
 
        for i in range(1, int(sys.argv[2]) + 1):
 
            print 'take', i
 
            test_clone_with_credentials(repo=sys.argv[1], method=METHOD,
 
                                        seq=seq, backend=backend)
 
        print 'time taken %.3f' % (time.time() - s)
 
    except Exception, e:
 
        sys.exit('stop on %s' % e)
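# Illustrative sketch, not part of the changeset: why IndexError is the right
# exception for the optional positional arguments above -- sys.argv is simply
# too short when an argument is omitted. _argv_or_default is a hypothetical
# helper, not something this script defines.
def _argv_or_default(position, default):
    try:
        return sys.argv[position]
    except IndexError:
        return default
# e.g. METHOD = _argv_or_default(3, 'clone'); backend = _argv_or_default(4, 'hg')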