Changeset - e63bcce18fef
[Not reviewed]
docs/conf.py
 
# -*- coding: utf-8 -*-
 
#
 
# Kallithea documentation build configuration file, created by
 
# sphinx-quickstart on Sun Oct 10 16:46:37 2010.
 
#
 
# This file is execfile()d with the current directory set to its containing dir.
 
#
 
# Note that not all possible configuration values are present in this
 
# autogenerated file.
 
#
 
# All configuration values have a default; values that are commented out
 
# serve to show the default.
 

	
 
import os
 
import sys
 

	
 
from kallithea import __version__
 

	
 

	
 
# If extensions (or modules to document with autodoc) are in another directory,
 
# add these directories to sys.path here. If the directory is relative to the
 
# documentation root, use os.path.abspath to make it absolute, like shown here.
 
sys.path.insert(0, os.path.abspath('..'))
 

	
 
# -- General configuration -----------------------------------------------------
 

	
 
# If your documentation needs a minimal Sphinx version, state it here.
 
#needs_sphinx = '1.0'
 

	
 
# Add any Sphinx extension module names here, as strings. They can be extensions
 
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
 
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest',
 
              'sphinx.ext.intersphinx', 'sphinx.ext.todo',
 
              'sphinx.ext.viewcode']
 

	
 
# Add any paths that contain templates here, relative to this directory.
 
templates_path = ['_templates']
 

	
 
# The suffix of source filenames.
 
source_suffix = '.rst'
 

	
 
# The encoding of source files.
 
#source_encoding = 'utf-8-sig'
 

	
 
# The master toctree document.
 
master_doc = 'index'
 

	
 
# General information about the project.
 
-project = u'Kallithea'
-copyright = u'2010-2020 by various authors, licensed as GPLv3.'
+project = 'Kallithea'
+copyright = '2010-2020 by various authors, licensed as GPLv3.'
 

	
 
# The version info for the project you're documenting, acts as replacement for
 
# |version| and |release|, also used in various other places throughout the
 
# built documents.
 
#
 
# The short X.Y version.
 
root = os.path.dirname(os.path.dirname(__file__))
 
sys.path.append(root)
 
version = __version__
 
# The full version, including alpha/beta/rc tags.
 
release = __version__
 

	
 
# The language for content autogenerated by Sphinx. Refer to documentation
 
# for a list of supported languages.
 
#language = None
 

	
 
# There are two options for replacing |today|: either, you set today to some
 
# non-false value, then it is used:
 
#today = ''
 
# Else, today_fmt is used as the format for a strftime call.
 
#today_fmt = '%B %d, %Y'
 

	
 
# List of patterns, relative to source directory, that match files and
 
# directories to ignore when looking for source files.
 
exclude_patterns = ['_build']
 

	
 
# The reST default role (used for this markup: `text`) to use for all documents.
 
#default_role = None
 

	
 
# If true, '()' will be appended to :func: etc. cross-reference text.
 
#add_function_parentheses = True
 

	
 
# If true, the current module name will be prepended to all description
 
# unit titles (such as .. function::).
 
#add_module_names = True
 

	
 
# If true, sectionauthor and moduleauthor directives will be shown in the
 
# output. They are ignored by default.
 
#show_authors = False
 

	
 
# The name of the Pygments (syntax highlighting) style to use.
 
pygments_style = 'sphinx'
 
highlight_language = 'none'
 

	
 
# A list of ignored prefixes for module index sorting.
 
#modindex_common_prefix = []
 

	
 

	
 
@@ -142,88 +142,88 @@ html_theme_path = ['theme']
 
# Custom sidebar templates, maps document names to template names.
 
#html_sidebars = {}
 

	
 
# Additional templates that should be rendered to pages, maps page names to
 
# template names.
 
#html_additional_pages = {}
 

	
 
# If false, no module index is generated.
 
#html_domain_indices = True
 

	
 
# If false, no index is generated.
 
#html_use_index = True
 

	
 
# If true, the index is split into individual pages for each letter.
 
#html_split_index = False
 

	
 
# If true, links to the reST sources are added to the pages.
 
#html_show_sourcelink = True
 

	
 
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
 
#html_show_sphinx = True
 

	
 
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
 
#html_show_copyright = True
 

	
 
# If true, an OpenSearch description file will be output, and all pages will
 
# contain a <link> tag referring to it.  The value of this option must be the
 
# base URL from which the finished HTML is served.
 
#html_use_opensearch = ''
 

	
 
# This is the file name suffix for HTML files (e.g. ".xhtml").
 
#html_file_suffix = None
 

	
 
# Output file base name for HTML help builder.
 
htmlhelp_basename = 'Kallithea-docs'
 

	
 

	
 
# -- Options for LaTeX output --------------------------------------------------
 

	
 
# The paper size ('letter' or 'a4').
 
#latex_paper_size = 'letter'
 

	
 
# The font size ('10pt', '11pt' or '12pt').
 
#latex_font_size = '10pt'
 

	
 
# Grouping the document tree into LaTeX files. List of tuples
 
# (source start file, target name, title, author, documentclass [howto/manual]).
 
latex_documents = [
 
-  ('index', 'Kallithea.tex', u'Kallithea Documentation',
-   u'Kallithea Developers', 'manual'),
+  ('index', 'Kallithea.tex', 'Kallithea Documentation',
+   'Kallithea Developers', 'manual'),
 
]
 

	
 
# The name of an image file (relative to this directory) to place at the top of
 
# the title page.
 
#latex_logo = None
 

	
 
# For "manual" documents, if this is true, then toplevel headings are parts,
 
# not chapters.
 
#latex_use_parts = False
 

	
 
# If true, show page references after internal links.
 
#latex_show_pagerefs = False
 

	
 
# If true, show URL addresses after external links.
 
#latex_show_urls = False
 

	
 
# Additional stuff for the LaTeX preamble.
 
#latex_preamble = ''
 

	
 
# Documents to append as an appendix to all manuals.
 
#latex_appendices = []
 

	
 
# If false, no module index is generated.
 
#latex_domain_indices = True
 

	
 

	
 
# -- Options for manual page output --------------------------------------------
 

	
 
# One entry per manual page. List of tuples
 
# (source start file, name, description, authors, manual section).
 
man_pages = [
 
-    ('index', 'kallithea', u'Kallithea Documentation',
-     [u'Kallithea Developers'], 1)
+    ('index', 'kallithea', 'Kallithea Documentation',
+     ['Kallithea Developers'], 1)
 
]
 

	
 

	
 
# Example configuration for intersphinx: refer to the Python standard library.
 
intersphinx_mapping = {'http://docs.python.org/': None}
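
The comments above distinguish a short X.Y ``version`` from the full ``release``, while the configuration assigns ``__version__`` to both. A hedged sketch of how the two are conventionally derived, assuming a semantic-version-like string (the example value is illustrative, not Kallithea's actual version):

# Hedged sketch: split a full release string into Sphinx's version/release pair.
full_version = '0.7.99'                           # e.g. kallithea.__version__
release = full_version                            # full version, incl. alpha/beta/rc tags
version = '.'.join(full_version.split('.')[:2])   # short X.Y version -> '0.7'
print(version, release)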
kallithea/controllers/api/api.py
 
@@ -388,97 +388,97 @@ class ApiController(JSONRPCController):
 
                                       "hg.register.manual_activate"],
 
                            "repositories": {"repo1": "repository.none"},
 
                            "repositories_groups": {"Group1": "group.read"}
 
                         },
 
                    }
 

	
 
            error:  null
 

	
 
        """
 
        if not HasPermissionAny('hg.admin')():
 
            # make sure a normal user does not pass someone else's userid,
            # they are not allowed to do that
 
            if not isinstance(userid, Optional) and userid != request.authuser.user_id:
 
                raise JSONRPCError(
 
                    'userid is not the same as your user'
 
                )
 

	
 
        if isinstance(userid, Optional):
 
            userid = request.authuser.user_id
 

	
 
        user = get_user_or_error(userid)
 
        data = user.get_api_data()
 
        data['permissions'] = AuthUser(user_id=user.user_id).permissions
 
        return data
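
The ``Optional(...)`` defaults used throughout these API methods follow a marker-object pattern: the handler can tell whether the caller actually supplied an argument, and ``Optional.extract()`` unwraps the default when it was omitted. A simplified, illustrative stand-in (not the actual kallithea class):

# Hedged sketch of the Optional marker pattern used by these API methods:
# a parameter defaults to Optional(value) so the handler can tell
# "caller omitted it" apart from "caller passed the default value".
class Optional(object):
    def __init__(self, default):
        self.default = default

    @staticmethod
    def extract(value):
        # unwrap only if the caller never supplied the argument
        return value.default if isinstance(value, Optional) else value


def demo(userid=Optional(None)):
    if isinstance(userid, Optional):
        userid = 42          # e.g. fall back to the calling user's id
    return userid

print(demo())                                  # 42 - argument omitted
print(demo(7))                                 # 7  - argument supplied
print(Optional.extract(Optional('default')))   # 'default'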
 

	
 
    @HasPermissionAnyDecorator('hg.admin')
 
    def get_users(self):
 
        """
 
        Lists all existing users. This command can be executed only using api_key
 
        belonging to user with admin rights.
 

	
 

	
 
        OUTPUT::
 

	
 
            id : <id_given_in_input>
 
            result: [<user_object>, ...]
 
            error:  null
 
        """
 

	
 
        return [
 
            user.get_api_data()
 
            for user in User.query()
 
                .order_by(User.username)
 
                .filter_by(is_default_user=False)
 
        ]
 

	
 
    @HasPermissionAnyDecorator('hg.admin')
 
    def create_user(self, username, email, password=Optional(''),
 
-                    firstname=Optional(u''), lastname=Optional(u''),
+                    firstname=Optional(''), lastname=Optional(''),
 
                    active=Optional(True), admin=Optional(False),
 
                    extern_type=Optional(User.DEFAULT_AUTH_TYPE),
 
                    extern_name=Optional('')):
 
        """
 
        Creates new user. Returns new user object. This command can
 
        be executed only using api_key belonging to user with admin rights.
 

	
 
        :param username: new username
 
        :type username: str or int
 
        :param email: email
 
        :type email: str
 
        :param password: password
 
        :type password: Optional(str)
 
        :param firstname: firstname
 
        :type firstname: Optional(str)
 
        :param lastname: lastname
 
        :type lastname: Optional(str)
 
        :param active: active
 
        :type active: Optional(bool)
 
        :param admin: admin
 
        :type admin: Optional(bool)
 
        :param extern_name: name of extern
 
        :type extern_name: Optional(str)
 
        :param extern_type: extern_type
 
        :type extern_type: Optional(str)
 

	
 

	
 
        OUTPUT::
 

	
 
            id : <id_given_in_input>
 
            result: {
 
                      "msg" : "created new user `<username>`",
 
                      "user": <user_obj>
 
                    }
 
            error:  null
 

	
 
        ERROR OUTPUT::
 

	
 
          id : <id_given_in_input>
 
          result : null
 
          error :  {
 
            "user `<username>` already exist"
 
            or
 
            "email `<email>` already exist"
 
            or
 
            "failed to create user `<username>`"
 
          }
 

	
 
@@ -641,97 +641,97 @@ class ApiController(JSONRPCController):
 
        belonging to user with admin rights or user who has at least
 
        read access to user group.
 

	
 
        :param usergroupid: id of user_group to edit
 
        :type usergroupid: str or int
 

	
 
        OUTPUT::
 

	
 
            id : <id_given_in_input>
 
            result : None if group not exist
 
                     {
 
                       "users_group_id" : "<id>",
 
                       "group_name" :     "<groupname>",
 
                       "active":          "<bool>",
 
                       "members" :  [<user_obj>,...]
 
                     }
 
            error : null
 

	
 
        """
 
        user_group = get_user_group_or_error(usergroupid)
 
        if not HasPermissionAny('hg.admin')():
 
            if not HasUserGroupPermissionLevel('read')(user_group.users_group_name):
 
                raise JSONRPCError('user group `%s` does not exist' % (usergroupid,))
 

	
 
        data = user_group.get_api_data()
 
        return data
 

	
 
    # permission check inside
 
    def get_user_groups(self):
 
        """
 
        Lists all existing user groups. This command can be executed only using
 
        api_key belonging to user with admin rights or user who has at least
 
        read access to user group.
 

	
 

	
 
        OUTPUT::
 

	
 
            id : <id_given_in_input>
 
            result : [<user_group_obj>,...]
 
            error : null
 
        """
 

	
 
        return [
 
            user_group.get_api_data()
 
            for user_group in UserGroupList(UserGroup.query().all(), perm_level='read')
 
        ]
 

	
 
    @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
 
-    def create_user_group(self, group_name, description=Optional(u''),
+    def create_user_group(self, group_name, description=Optional(''),
 
                          owner=Optional(OAttr('apiuser')), active=Optional(True)):
 
        """
 
        Creates new user group. This command can be executed only using api_key
 
        belonging to user with admin rights or an user who has create user group
 
        permission
 

	
 
        :param group_name: name of new user group
 
        :type group_name: str
 
        :param description: group description
 
        :type description: str
 
        :param owner: owner of group. If not passed apiuser is the owner
 
        :type owner: Optional(str or int)
 
        :param active: group is active
 
        :type active: Optional(bool)
 

	
 
        OUTPUT::
 

	
 
            id : <id_given_in_input>
 
            result: {
 
                      "msg": "created new user group `<groupname>`",
 
                      "user_group": <user_group_object>
 
                    }
 
            error:  null
 

	
 
        ERROR OUTPUT::
 

	
 
          id : <id_given_in_input>
 
          result : null
 
          error :  {
 
            "user group `<group name>` already exist"
 
            or
 
            "failed to create group `<group name>`"
 
          }
 

	
 
        """
 

	
 
        if UserGroupModel().get_by_name(group_name):
 
            raise JSONRPCError("user group `%s` already exist" % (group_name,))
 

	
 
        try:
 
            if isinstance(owner, Optional):
 
                owner = request.authuser.user_id
 

	
 
            owner = get_user_or_error(owner)
 
            active = Optional.extract(active)
 
            description = Optional.extract(description)
 
            ug = UserGroupModel().create(name=group_name, description=description,
 
                                         owner=owner, active=active)
 
@@ -2328,96 +2328,96 @@ class ApiController(JSONRPCController):
 
        repo = get_repo_or_error(repoid)
 
        if not HasRepoPermissionLevel('read')(repo.repo_name):
 
            raise JSONRPCError('Access denied to repo %s' % repo.repo_name)
 

	
 
        format = "%Y-%m-%dT%H:%M:%S"
 
        try:
 
            return [e.__json__(with_file_list) for e in
 
                repo.scm_instance.get_changesets(start,
 
                                                 end,
 
                                                 datetime.strptime(start_date, format) if start_date else None,
 
                                                 datetime.strptime(end_date, format) if end_date else None,
 
                                                 branch_name,
 
                                                 reverse, max_revisions)]
 
        except EmptyRepositoryError as e:
 
            raise JSONRPCError('Repository is empty')
 

	
 
    # permission check inside
 
    def get_changeset(self, repoid, raw_id, with_reviews=Optional(False)):
 
        repo = get_repo_or_error(repoid)
 
        if not HasRepoPermissionLevel('read')(repo.repo_name):
 
            raise JSONRPCError('Access denied to repo %s' % repo.repo_name)
 
        changeset = repo.get_changeset(raw_id)
 
        if isinstance(changeset, EmptyChangeset):
 
            raise JSONRPCError('Changeset %s does not exist' % raw_id)
 

	
 
        info = dict(changeset.as_dict())
 

	
 
        with_reviews = Optional.extract(with_reviews)
 
        if with_reviews:
 
            reviews = ChangesetStatusModel().get_statuses(
 
                                repo.repo_name, raw_id)
 
            info["reviews"] = reviews
 

	
 
        return info
 

	
 
    # permission check inside
 
    def get_pullrequest(self, pullrequest_id):
 
        """
 
        Get given pull request by id
 
        """
 
        pull_request = PullRequest.get(pullrequest_id)
 
        if pull_request is None:
 
            raise JSONRPCError('pull request `%s` does not exist' % (pullrequest_id,))
 
        if not HasRepoPermissionLevel('read')(pull_request.org_repo.repo_name):
 
            raise JSONRPCError('not allowed')
 
        return pull_request.get_api_data()
 

	
 
    # permission check inside
 
-    def comment_pullrequest(self, pull_request_id, comment_msg=u'', status=None, close_pr=False):
+    def comment_pullrequest(self, pull_request_id, comment_msg='', status=None, close_pr=False):
 
        """
 
        Add comment, close and change status of pull request.
 
        """
 
        apiuser = get_user_or_error(request.authuser.user_id)
 
        pull_request = PullRequest.get(pull_request_id)
 
        if pull_request is None:
 
            raise JSONRPCError('pull request `%s` does not exist' % (pull_request_id,))
 
        if (not HasRepoPermissionLevel('read')(pull_request.org_repo.repo_name)):
 
            raise JSONRPCError('No permission to add comment. User needs at least reading permissions'
 
                               ' to the source repository.')
 
        owner = apiuser.user_id == pull_request.owner_id
 
        reviewer = apiuser.user_id in [reviewer.user_id for reviewer in pull_request.reviewers]
 
        if close_pr and not (apiuser.admin or owner):
 
            raise JSONRPCError('No permission to close pull request. User needs to be admin or owner.')
 
        if status and not (apiuser.admin or owner or reviewer):
 
            raise JSONRPCError('No permission to change pull request status. User needs to be admin, owner or reviewer.')
 
        if pull_request.is_closed():
 
            raise JSONRPCError('pull request is already closed')
 

	
 
        comment = ChangesetCommentsModel().create(
 
            text=comment_msg,
 
            repo=pull_request.org_repo.repo_id,
 
            author=apiuser.user_id,
 
            pull_request=pull_request.pull_request_id,
 
            f_path=None,
 
            line_no=None,
 
            status_change=ChangesetStatus.get_status_lbl(status),
 
            closing_pr=close_pr
 
        )
 
        action_logger(apiuser,
 
                      'user_commented_pull_request:%s' % pull_request_id,
 
                      pull_request.org_repo, request.ip_addr)
 
        if status:
 
            ChangesetStatusModel().set_status(
 
                pull_request.org_repo_id,
 
                status,
 
                apiuser.user_id,
 
                comment,
 
                pull_request=pull_request_id
 
            )
 
        if close_pr:
 
            PullRequestModel().close_pull_request(pull_request_id)
 
            action_logger(apiuser,
 
                          'user_closed_pull_request:%s' % pull_request_id,
 
                          pull_request.org_repo, request.ip_addr)
 
        Session().commit()
 
        return True
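
All of these methods are reached through Kallithea's JSON-RPC endpoint. A hedged example of calling one of them from a script; the URL, port, and API key are placeholders, and the payload layout (id/api_key/method/args) should be checked against the API documentation of your instance:

# Hedged example: invoke the get_users API method over JSON-RPC.
import json
import urllib.request

payload = {
    "id": 1,
    "api_key": "<api key of a user with hg.admin rights>",   # placeholder
    "method": "get_users",
    "args": {},
}
req = urllib.request.Request(
    "http://localhost:5000/_admin/api",            # adjust for your instance
    data=json.dumps(payload).encode('utf-8'),
    headers={'Content-Type': 'application/json'},
)
with urllib.request.urlopen(req) as resp:
    print(json.load(resp))   # expected shape: {"id": 1, "result": [...], "error": null}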
kallithea/controllers/search.py
 
@@ -49,94 +49,94 @@ log = logging.getLogger(__name__)
 
class SearchController(BaseRepoController):
 

	
 
    @LoginRequired(allow_default_user=True)
 
    def index(self, repo_name=None):
 
        c.repo_name = repo_name
 
        c.formated_results = []
 
        c.runtime = ''
 
        c.cur_query = request.GET.get('q', None)
 
        c.cur_type = request.GET.get('type', 'content')
 
        c.cur_search = search_type = {'content': 'content',
 
                                      'commit': 'message',
 
                                      'path': 'path',
 
                                      'repository': 'repository'
 
                                      }.get(c.cur_type, 'content')
 

	
 
        index_name = {
 
            'content': IDX_NAME,
 
            'commit': CHGSET_IDX_NAME,
 
            'path': IDX_NAME
 
        }.get(c.cur_type, IDX_NAME)
 

	
 
        schema_defn = {
 
            'content': SCHEMA,
 
            'commit': CHGSETS_SCHEMA,
 
            'path': SCHEMA
 
        }.get(c.cur_type, SCHEMA)
 

	
 
        log.debug('IDX: %s', index_name)
 
        log.debug('SCHEMA: %s', schema_defn)
 

	
 
        if c.cur_query:
 
            cur_query = c.cur_query.lower()
 
            log.debug(cur_query)
 

	
 
        if c.cur_query:
 
            p = safe_int(request.GET.get('page'), 1)
 
            highlight_items = set()
 
            index_dir = config['index_dir']
 
            try:
 
                if not exists_in(index_dir, index_name):
 
                    raise EmptyIndexError
 
                idx = open_dir(index_dir, indexname=index_name)
 
                searcher = idx.searcher()
 

	
 
                qp = QueryParser(search_type, schema=schema_defn)
 
                if c.repo_name:
 
                    # use "repository_rawname:" instead of "repository:"
 
                    # for case-sensitive matching
 
-                    cur_query = u'repository_rawname:%s %s' % (c.repo_name, cur_query)
+                    cur_query = 'repository_rawname:%s %s' % (c.repo_name, cur_query)
 
                try:
 
                    query = qp.parse(cur_query)
 
                    # extract words for highlight
 
                    if isinstance(query, Phrase):
 
                        highlight_items.update(query.words)
 
                    elif isinstance(query, Prefix):
 
                        highlight_items.add(query.text)
 
                    else:
 
                        for i in query.all_terms():
 
                            if i[0] in ['content', 'message']:
 
                                highlight_items.add(i[1])
 

	
 
                    matcher = query.matcher(searcher)
 

	
 
                    log.debug('query: %s', query)
 
                    log.debug('hl terms: %s', highlight_items)
 
                    results = searcher.search(query)
 
                    res_ln = len(results)
 
                    c.runtime = '%s results (%.3f seconds)' % (
 
                        res_ln, results.runtime
 
                    )
 

	
 
                    repo_location = RepoModel().repos_path
 
                    c.formated_results = Page(
 
                        WhooshResultWrapper(search_type, searcher, matcher,
 
                                            highlight_items, repo_location),
 
                        page=p,
 
                        item_count=res_ln,
 
                        items_per_page=10,
 
                        type=c.cur_type,
 
                        q=c.cur_query,
 
                    )
 

	
 
                except QueryParserError:
 
                    c.runtime = _('Invalid search query. Try quoting it.')
 
                searcher.close()
 
            except EmptyIndexError:
 
                log.error("Empty search index - run 'kallithea-cli index-create' regularly")
 
                c.runtime = _('The server has no search index.')
 
            except Exception:
 
                log.error(traceback.format_exc())
 
                c.runtime = _('An error occurred during search operation.')
 

	
 
        # Return a rendered template
 
        return render('/search/search.html')
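
The controller above leans on Whoosh's QueryParser/searcher API. A tiny self-contained round-trip showing those calls outside Kallithea (an in-memory index with a made-up schema, purely for illustration):

from whoosh.fields import ID, TEXT, Schema
from whoosh.filedb.filestore import RamStorage
from whoosh.qparser import QueryParser

schema = Schema(path=ID(stored=True), content=TEXT(stored=True))
ix = RamStorage().create_index(schema)        # throwaway in-memory index

writer = ix.writer()
writer.add_document(path=u'setup.py', content=u'repository searching with whoosh')
writer.commit()

with ix.searcher() as searcher:
    query = QueryParser('content', schema=schema).parse(u'whoosh')
    results = searcher.search(query)          # results.runtime holds the search time
    print(len(results), [hit['path'] for hit in results])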
kallithea/lib/auth_modules/__init__.py
 
@@ -241,101 +241,101 @@ class KallitheaExternalAuthPlugin(Kallit
 
    def _authenticate(self, userobj, username, passwd, settings, **kwargs):
 
        user_data = super(KallitheaExternalAuthPlugin, self)._authenticate(
 
            userobj, username, passwd, settings, **kwargs)
 
        if user_data is not None:
 
            if userobj is None: # external authentication of unknown user that will be created soon
 
                def_user_perms = AuthUser(dbuser=User.get_default_user()).permissions['global']
 
                active = 'hg.extern_activate.auto' in def_user_perms
 
            else:
 
                active = userobj.active
 

	
 
            if self.use_fake_password():
 
                # Randomize the PW because we don't need it, but don't want
 
                # them blank either
 
                passwd = PasswordGenerator().gen_password(length=8)
 

	
 
            log.debug('Updating or creating user info from %s plugin',
 
                      self.name)
 
            user = UserModel().create_or_update(
 
                username=user_data['username'],
 
                password=passwd,
 
                email=user_data["email"],
 
                firstname=user_data["firstname"],
 
                lastname=user_data["lastname"],
 
                active=active,
 
                admin=user_data["admin"],
 
                extern_name=user_data["extern_name"],
 
                extern_type=self.name,
 
            )
 
            # enforce user is just in given groups, all of them has to be ones
 
            # created from plugins. We store this info in _group_data JSON field
 
            groups = user_data['groups'] or []
 
            UserGroupModel().enforce_groups(user, groups, self.name)
 
            Session().commit()
 
        return user_data
 

	
 

	
 
def loadplugin(plugin):
 
    """
 
    Imports, instantiates, and returns the authentication plugin in the module named by plugin
 
    (e.g., plugin='kallithea.lib.auth_modules.auth_internal'). Returns an instance of the
 
    KallitheaAuthPluginBase subclass on success, raises exceptions on failure.
 

	
 
    raises:
 
        AttributeError -- no KallitheaAuthPlugin class in the module
 
        TypeError -- if the KallitheaAuthPlugin is not a subclass of ours KallitheaAuthPluginBase
 
        ImportError -- if we couldn't import the plugin at all
 
    """
 
    log.debug("Importing %s", plugin)
 
-    if not plugin.startswith(u'kallithea.lib.auth_modules.auth_'):
-        parts = plugin.split(u'.lib.auth_modules.auth_', 1)
+    if not plugin.startswith('kallithea.lib.auth_modules.auth_'):
+        parts = plugin.split('.lib.auth_modules.auth_', 1)
 
        if len(parts) == 2:
 
            _module, pn = parts
 
-            plugin = u'kallithea.lib.auth_modules.auth_' + pn
+            plugin = 'kallithea.lib.auth_modules.auth_' + pn
 
    PLUGIN_CLASS_NAME = "KallitheaAuthPlugin"
 
    try:
 
        module = importlib.import_module(plugin)
 
    except (ImportError, TypeError):
 
        log.error(traceback.format_exc())
 
        # TODO: make this more error prone, if by some accident we screw up
 
        # the plugin name, the crash is pretty bad and hard to recover
 
        raise
 

	
 
    log.debug("Loaded auth plugin from %s (module:%s, file:%s)",
 
              plugin, module.__name__, module.__file__)
 

	
 
    pluginclass = getattr(module, PLUGIN_CLASS_NAME)
 
    if not issubclass(pluginclass, KallitheaAuthPluginBase):
 
        raise TypeError("Authentication class %s.KallitheaAuthPlugin is not "
 
                        "a subclass of %s" % (plugin, KallitheaAuthPluginBase))
 

	
 
    plugin = pluginclass()
 
    if plugin.plugin_settings.__func__ != KallitheaAuthPluginBase.plugin_settings:
 
        raise TypeError("Authentication class %s.KallitheaAuthPluginBase "
 
                        "has overridden the plugin_settings method, which is "
 
                        "forbidden." % plugin)
 
    return plugin
 

	
 

	
 
def get_auth_plugins():
 
    """Return a list of instances of plugins that are available and enabled"""
 
    auth_plugins = []
 
    for plugin_name in Setting.get_by_name("auth_plugins").app_settings_value:
 
        try:
 
            plugin = loadplugin(plugin_name)
 
        except Exception:
 
            log.exception('Failed to load authentication module %s' % (plugin_name))
 
        else:
 
            auth_plugins.append(plugin)
 
    return auth_plugins
 

	
 

	
 
def authenticate(username, password, environ=None):
 
    """
 
    Authentication function used for access control,
 
    It tries to authenticate based on enabled authentication modules.
 

	
 
    :param username: username can be empty for container auth
 
    :param password: password can be empty for container auth
 
    :param environ: environ headers passed for container auth
 
    :returns: None if auth failed, user_data dict if auth is correct
 
    """
kallithea/lib/db_manage.py
 
@@ -340,83 +340,83 @@ class DbManage(object):
 
        return real_path
 

	
 
    def create_settings(self, repo_root_path):
 
        ui_config = [
 
            ('paths', '/', repo_root_path, True),
 
            #('phases', 'publish', 'false', False)
 
            ('hooks', Ui.HOOK_UPDATE, 'hg update >&2', False),
 
            ('hooks', Ui.HOOK_REPO_SIZE, 'python:kallithea.lib.hooks.repo_size', True),
 
            ('extensions', 'largefiles', '', True),
 
            ('largefiles', 'usercache', os.path.join(repo_root_path, '.cache', 'largefiles'), True),
 
            ('extensions', 'hgsubversion', '', False),
 
            ('extensions', 'hggit', '', False),
 
        ]
 
        for ui_section, ui_key, ui_value, ui_active in ui_config:
 
            ui_conf = Ui(
 
                ui_section=ui_section,
 
                ui_key=ui_key,
 
                ui_value=ui_value,
 
                ui_active=ui_active)
 
            self.sa.add(ui_conf)
 

	
 
        settings = [
 
            ('realm', 'Kallithea', 'unicode'),
 
            ('title', '', 'unicode'),
 
            ('ga_code', '', 'unicode'),
 
            ('show_public_icon', True, 'bool'),
 
            ('show_private_icon', True, 'bool'),
 
            ('stylify_metalabels', False, 'bool'),
 
            ('dashboard_items', 100, 'int'), # TODO: call it page_size
 
            ('admin_grid_items', 25, 'int'),
 
            ('show_version', True, 'bool'),
 
            ('use_gravatar', True, 'bool'),
 
            ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'),
 
            ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'),
 
            ('clone_ssh_tmpl', Repository.DEFAULT_CLONE_SSH, 'unicode'),
 
        ]
 
        for key, val, type_ in settings:
 
            sett = Setting(key, val, type_)
 
            self.sa.add(sett)
 

	
 
        self.create_auth_plugin_options()
 
        self.create_default_options()
 

	
 
        log.info('Populated Ui and Settings defaults')
 

	
 
    def create_user(self, username, password, email='', admin=False):
 
        log.info('creating user %s', username)
 
        UserModel().create_or_update(username, password, email,
 
-                                     firstname=u'Kallithea', lastname=u'Admin',
+                                     firstname='Kallithea', lastname='Admin',
 
                                     active=True, admin=admin,
 
                                     extern_type=User.DEFAULT_AUTH_TYPE)
 

	
 
    def create_default_user(self):
 
        log.info('creating default user')
 
        # create default user for handling default permissions.
 
        user = UserModel().create_or_update(username=User.DEFAULT_USER,
 
                                            password=str(uuid.uuid1())[:20],
 
                                            email='anonymous@kallithea-scm.org',
 
-                                            firstname=u'Anonymous',
-                                            lastname=u'User')
+                                            firstname='Anonymous',
+                                            lastname='User')
 
        # based on configuration options activate/deactivate this user which
 
        # controls anonymous access
 
        if self.cli_args.get('public_access') is False:
 
            log.info('Public access disabled')
 
            user.active = False
 
            Session().commit()
 

	
 
    def create_permissions(self):
 
        """
 
        Creates all permissions defined in the system
 
        """
 
        # module.(access|create|change|delete)_[name]
 
        # module.(none|read|write|admin)
 
        log.info('creating permissions')
 
        PermissionModel().create_permissions()
 

	
 
    def populate_default_permissions(self):
 
        """
 
        Populate default permissions. It will create only the default
 
        permissions that are missing, and not alter already defined ones
 
        """
 
        log.info('creating default user permissions')
 
        PermissionModel().create_default_permissions(user=User.DEFAULT_USER)
kallithea/lib/feeds.py
 
@@ -14,97 +14,97 @@
 

	
 
"""
 
kallithea.lib.feeds
 
~~~~~~~~~~~~~~~~~~~
 

	
 
Shared code for providing RSS and ATOM feeds.
 
"""
 

	
 
import datetime
 
import re
 

	
 
import mako.template
 

	
 

	
 
language = 'en-us'
 
ttl = "5"
 

	
 

	
 
# From ``django.utils.feedgenerator`` via webhelpers.feedgenerator
 
def rfc2822_date(date):
 
    # We do this ourselves to be timezone aware, email.Utils is not tz aware.
 
    if getattr(date, "tzinfo", False):
 
        time_str = date.strftime('%a, %d %b %Y %H:%M:%S ')
 
        offset = date.tzinfo.utcoffset(date)
 
        timezone = (offset.days * 24 * 60) + (offset.seconds / 60)
 
        hour, minute = divmod(timezone, 60)
 
        return time_str + "%+03d%02d" % (hour, minute)
 
    else:
 
        return date.strftime('%a, %d %b %Y %H:%M:%S -0000')
 

	
 
# From ``django.utils.feedgenerator`` via webhelpers.feedgenerator
 
def rfc3339_date(date):
 
    if getattr(date, "tzinfo", False):
 
        time_str = date.strftime('%Y-%m-%dT%H:%M:%S')
 
        offset = date.tzinfo.utcoffset(date)
 
        timezone = (offset.days * 24 * 60) + (offset.seconds / 60)
 
        hour, minute = divmod(timezone, 60)
 
        return time_str + "%+03d:%02d" % (hour, minute)
 
    else:
 
        return date.strftime('%Y-%m-%dT%H:%M:%SZ')
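
Both helpers turn ``utcoffset()`` into a signed hour/minute pair the same way; a worked example for an offset of +05:30 (integer division used here just to keep the result an int):

from datetime import timedelta

offset = timedelta(hours=5, minutes=30)                       # utcoffset() of the date
minutes = (offset.days * 24 * 60) + (offset.seconds // 60)    # 330
hour, minute = divmod(minutes, 60)                            # (5, 30)
print("%+03d%02d" % (hour, minute))                           # '+0530' (RFC 2822 style)
print("%+03d:%02d" % (hour, minute))                          # '+05:30' (RFC 3339 style)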
 

	
 
# From ``django.utils.feedgenerator`` via webhelpers.feedgenerator
 
def get_tag_uri(url, date):
 
    "Creates a TagURI. See http://diveintomark.org/archives/2004/05/28/howto-atom-id"
 
    tag = re.sub('^http://', '', url)
 
    if date is not None:
 
        tag = re.sub('/', ',%s:/' % date.strftime('%Y-%m-%d'), tag, 1)
 
    tag = re.sub('#', '/', tag)
 
-    return u'tag:' + tag
+    return 'tag:' + tag
 

	
 

	
 
class Attributes(object):
 
    """Simple namespace for attribute dict access in mako and elsewhere"""
 
    def __init__(self, a_dict):
 
        self.__dict__ = a_dict
 

	
 

	
 
class _Feeder(object):
 

	
 
    content_type = None
 
    template = None  # subclass must provide a mako.template.Template
 

	
 
    @classmethod
 
    def render(cls, header, entries):
 
        try:
 
            latest_pubdate = max(
 
                pubdate for pubdate in (e.get('pubdate') for e in entries)
 
                if pubdate
 
            )
 
        except ValueError:  # max() arg is an empty sequence ... or worse
 
            latest_pubdate = datetime.datetime.now()
 

	
 
        return cls.template.render(
 
            language=language,
 
            ttl=ttl,  # rss only
 
            latest_pubdate=latest_pubdate,
 
            rfc2822_date=rfc2822_date,  # for RSS
 
            rfc3339_date=rfc3339_date,  # for Atom
 
            get_tag_uri=get_tag_uri,
 
            entries=[Attributes(e) for e in entries],
 
            **header
 
        )
 

	
 

	
 
class AtomFeed(_Feeder):
 

	
 
    content_type = 'application/atom+xml'
 

	
 
    template = mako.template.Template('''\
 
<?xml version="1.0" encoding="utf-8"?>
 
<feed xmlns="http://www.w3.org/2005/Atom" xml:lang="${language}">
 
  <title>${title}</title>
 
  <link href="${link}" rel="alternate"></link>
 
  <id>${link}</id>
 
  <updated>${rfc3339_date(latest_pubdate)}</updated>
 
  % for entry in entries:
 
  <entry>
kallithea/lib/indexers/__init__.py
 
@@ -158,85 +158,85 @@ class WhooshResultWrapper(object):
 
    def __len__(self):
 
        return len(self.doc_ids)
 

	
 
    def __iter__(self):
 
        """
 
        Allows iteration over results, and lazily generates content
 

	
 
        *Requires* implementation of ``__getitem__`` method.
 
        """
 
        for docid in self.doc_ids:
 
            yield self.get_full_content(docid)
 

	
 
    def __getitem__(self, key):
 
        """
 
        Slicing of resultWrapper
 
        """
 
        i, j = key.start, key.stop
 

	
 
        slices = []
 
        for docid in self.doc_ids[i:j]:
 
            slices.append(self.get_full_content(docid))
 
        return slices
 

	
 
    def get_full_content(self, docid):
 
        res = self.searcher.stored_fields(docid[0])
 
        log.debug('result: %s', res)
 
        if self.search_type == 'content':
 
            full_repo_path = os.path.join(self.repo_location, res['repository'])
 
            f_path = res['path'].split(full_repo_path)[-1]
 
            f_path = f_path.lstrip(os.sep)
 
            content_short = self.get_short_content(res, docid[1])
 
            res.update({'content_short': content_short,
 
                        'content_short_hl': self.highlight(content_short),
 
                        'f_path': f_path
 
            })
 
        elif self.search_type == 'path':
 
            full_repo_path = os.path.join(self.repo_location, res['repository'])
 
            f_path = res['path'].split(full_repo_path)[-1]
 
            f_path = f_path.lstrip(os.sep)
 
            res.update({'f_path': f_path})
 
        elif self.search_type == 'message':
 
            res.update({'message_hl': self.highlight(res['message'])})
 

	
 
        log.debug('result: %s', res)
 

	
 
        return res
 

	
 
    def get_short_content(self, res, chunks):
 
-        return u''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
+        return ''.join([res['content'][chunk[0]:chunk[1]] for chunk in chunks])
 

	
 
    def get_chunks(self):
 
        """
 
        Smart function that implements chunking the content
 
        but not overlap chunks so it doesn't highlight the same
 
        close occurrences twice.
 
        """
 
        memory = [(0, 0)]
 
        try:
 
            supports_positions = self.matcher.supports('positions')
 
        except AttributeError:  # 'NoneType' object has no attribute 'supports' (because matcher never get a format)
 
            supports_positions = False
 
        if supports_positions:
 
            for span in self.matcher.spans():
 
                start = span.startchar or 0
 
                end = span.endchar or 0
 
                start_offseted = max(0, start - self.fragment_size)
 
                end_offseted = end + self.fragment_size
 

	
 
                if start_offseted < memory[-1][1]:
 
                    start_offseted = memory[-1][1]
 
                memory.append((start_offseted, end_offseted,))
 
                yield (start_offseted, end_offseted,)
 

	
 
    def highlight(self, content, top=5):
 
        if self.search_type not in ['content', 'message']:
 
            return ''
 
        hl = whoosh_highlight(
 
            text=content,
 
            terms=self.highlight_items,
 
            analyzer=ANALYZER,
 
            fragmenter=FRAGMENTER,
 
            formatter=FORMATTER,
 
            top=top
 
        )
 
        return hl
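
get_chunks() widens every match span by a fragment size while clamping each chunk's start to the previous chunk's end, so overlapping matches are not highlighted twice. The idea in isolation (a hypothetical standalone helper, not the class method):

# Hedged sketch of the non-overlapping chunking idea behind get_chunks():
# expand each match span by a fragment size, but never let a chunk start
# before the previous chunk ended, so the same region is not emitted twice.
def chunks(spans, fragment_size=10):
    last_end = 0
    for start, end in spans:
        chunk_start = max(last_end, max(0, start - fragment_size))
        chunk_end = end + fragment_size
        last_end = chunk_end
        yield (chunk_start, chunk_end)

print(list(chunks([(15, 20), (22, 30)])))   # [(5, 30), (30, 40)]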
kallithea/lib/indexers/daemon.py
 
@@ -136,242 +136,242 @@ class WhooshIndexingDaemon(object):
 

	
 
        except RepositoryError:
 
            log.debug(traceback.format_exc())
 
            pass
 
        return index_paths_
 

	
 
    def get_node(self, repo, path, index_rev=None):
 
        """
 
        gets a filenode based on given full path.
 

	
 
        :param repo: scm repo instance
 
        :param path: full path including root location
 
        :return: FileNode
 
        """
 
        # FIXME: paths should be normalized ... or even better: don't include repo.path
 
        assert path.startswith(repo.path)
 
        assert path[len(repo.path)] in (os.path.sep, os.path.altsep)
 
        node_path = path[len(repo.path) + 1:]
 
        cs = self._get_index_changeset(repo, index_rev=index_rev)
 
        node = cs.get_node(node_path)
 
        return node
 

	
 
    def is_indexable_node(self, node):
 
        """
 
        Just index the content of chosen files, skipping binary files
 
        """
 
        return (node.extension in INDEX_EXTENSIONS or node.name in INDEX_FILENAMES) and \
 
               not node.is_binary
 

	
 
    def get_node_mtime(self, node):
 
        return mktime(node.last_changeset.date.timetuple())
 

	
 
    def add_doc(self, writer, path, repo, repo_name, index_rev=None):
 
        """
 
        Adding doc to writer this function itself fetches data from
 
        the instance of vcs backend
 
        """
 
        try:
 
            node = self.get_node(repo, path, index_rev)
 
        except (ChangesetError, NodeDoesNotExistError):
 
            log.debug("    >> %s - not found in %s %s", path, repo, index_rev)
 
            return 0, 0
 

	
 
        indexed = indexed_w_content = 0
 
        if self.is_indexable_node(node):
 
            bytes_content = node.content
 
            if b'\0' in bytes_content:
 
                log.warning('    >> %s - no text content', path)
 
-                u_content = u''
+                u_content = ''
 
            else:
 
                log.debug('    >> %s', path)
 
                u_content = safe_str(bytes_content)
 
                indexed_w_content += 1
 

	
 
        else:
 
            log.debug('    >> %s - not indexable', path)
 
            # just index the file name without its content
 
-            u_content = u''
+            u_content = ''
 
            indexed += 1
 

	
 
        writer.add_document(
 
            fileid=path,
 
            owner=repo.contact,
 
            repository_rawname=repo_name,
 
            repository=repo_name,
 
            path=path,
 
            content=u_content,
 
            modtime=self.get_node_mtime(node),
 
            extension=node.extension
 
        )
 
        return indexed, indexed_w_content
 

	
 
    def index_changesets(self, writer, repo_name, repo, start_rev=None):
 
        """
 
        Add all changeset in the vcs repo starting at start_rev
 
        to the index writer
 

	
 
        :param writer: the whoosh index writer to add to
 
        :param repo_name: name of the repository from whence the
 
          changeset originates including the repository group
 
        :param repo: the vcs repository instance to index changesets for,
 
          the presumption is the repo has changesets to index
 
        :param start_rev=None: the full sha id to start indexing from
 
          if start_rev is None then index from the first changeset in
 
          the repo
 
        """
 

	
 
        if start_rev is None:
 
            start_rev = repo[0].raw_id
 

	
 
        log.debug('Indexing changesets in %s, starting at rev %s',
 
                  repo_name, start_rev)
 

	
 
        indexed = 0
 
        cs_iter = repo.get_changesets(start=start_rev)
 
        total = len(cs_iter)
 
        for cs in cs_iter:
 
            indexed += 1
 
            log.debug('    >> %s %s/%s', cs, indexed, total)
 
            writer.add_document(
 
                raw_id=cs.raw_id,
 
                owner=repo.contact,
 
                date=cs._timestamp,
 
                repository_rawname=repo_name,
 
                repository=repo_name,
 
                author=cs.author,
 
                message=cs.message,
 
                last=cs.last,
 
-                added=u' '.join(node.path for node in cs.added).lower(),
-                removed=u' '.join(node.path for node in cs.removed).lower(),
-                changed=u' '.join(node.path for node in cs.changed).lower(),
-                parents=u' '.join(cs.raw_id for cs in cs.parents),
+                added=' '.join(node.path for node in cs.added).lower(),
+                removed=' '.join(node.path for node in cs.removed).lower(),
+                changed=' '.join(node.path for node in cs.changed).lower(),
+                parents=' '.join(cs.raw_id for cs in cs.parents),
 
            )
 

	
 
        return indexed
 

	
 
    def index_files(self, file_idx_writer, repo_name, repo):
 
        """
 
        Index files for given repo_name
 

	
 
        :param file_idx_writer: the whoosh index writer to add to
 
        :param repo_name: name of the repository we're indexing
 
        :param repo: instance of vcs repo
 
        """
 
        i_cnt = iwc_cnt = 0
 
        log.debug('Building file index for %s @revision:%s', repo_name,
 
                                                self._get_index_revision(repo))
 
        index_rev = self._get_index_revision(repo)
 
        for idx_path in self.get_paths(repo):
 
            i, iwc = self.add_doc(file_idx_writer, idx_path, repo, repo_name, index_rev)
 
            i_cnt += i
 
            iwc_cnt += iwc
 

	
 
        log.debug('added %s files %s with content for repo %s',
 
                  i_cnt + iwc_cnt, iwc_cnt, repo.path)
 
        return i_cnt, iwc_cnt
 

	
 
    def update_changeset_index(self):
 
        idx = open_dir(self.index_location, indexname=CHGSET_IDX_NAME)
 

	
 
        with idx.searcher() as searcher:
 
            writer = idx.writer()
 
            writer_is_dirty = False
 
            try:
 
                indexed_total = 0
 
                repo_name = None
 
                for repo_name, repo in sorted(self.repo_paths.items()):
 
                    log.debug('Updating changeset index for repo %s', repo_name)
 
                    # skip indexing if there aren't any revs in the repo
 
                    num_of_revs = len(repo)
 
                    if num_of_revs < 1:
 
                        continue
 

	
 
                    qp = QueryParser('repository', schema=CHGSETS_SCHEMA)
 
                    q = qp.parse(u"last:t AND %s" % repo_name)
 
                    q = qp.parse("last:t AND %s" % repo_name)
 

	
 
                    results = searcher.search(q)
 

	
 
                    # default to scanning the entire repo
 
                    last_rev = 0
 
                    start_id = None
 

	
 
                    if len(results) > 0:
 
                        # assuming that there is only one result, if not this
 
                        # may require a full re-index.
 
                        start_id = results[0]['raw_id']
 
                        last_rev = repo.get_changeset(revision=start_id).revision
 

	
 
                    # there are new changesets to index or a new repo to index
 
                    if last_rev == 0 or num_of_revs > last_rev + 1:
 
                        # delete the docs in the index for the previous
 
                        # last changeset(s)
 
                        for hit in results:
 
                            q = qp.parse(u"last:t AND %s AND raw_id:%s" %
 
                            q = qp.parse("last:t AND %s AND raw_id:%s" %
 
                                            (repo_name, hit['raw_id']))
 
                            writer.delete_by_query(q)
 

	
 
                        # index from the previous last changeset + all new ones
 
                        indexed_total += self.index_changesets(writer,
 
                                                repo_name, repo, start_id)
 
                        writer_is_dirty = True
 
                log.debug('indexed %s changesets for repo %s',
 
                          indexed_total, repo_name
 
                )
 
            finally:
 
                if writer_is_dirty:
 
                    log.debug('>> COMMITING CHANGES TO CHANGESET INDEX<<')
 
                    writer.commit(merge=True)
 
                    log.debug('>>> FINISHED REBUILDING CHANGESET INDEX <<<')
 
                else:
 
                    log.debug('>> NOTHING TO COMMIT TO CHANGESET INDEX<<')
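
The incremental update hinges on the ``last`` flag: the newest indexed changeset of each repository is stored with last=True, so the next run can look it up, delete that marker document, and resume from its revision. A self-contained, illustrative Whoosh snippet of that bookmark lookup (schema, index storage, and values are made up for the example):

from whoosh.fields import BOOLEAN, ID, TEXT, Schema
from whoosh.filedb.filestore import RamStorage
from whoosh.qparser import QueryParser

schema = Schema(raw_id=ID(stored=True), repository=TEXT(stored=True),
                last=BOOLEAN())
ix = RamStorage().create_index(schema)        # throwaway in-memory index

w = ix.writer()
w.add_document(raw_id=u'abc123', repository=u'somerepo', last=True)
w.commit()

with ix.searcher() as s:
    q = QueryParser('repository', schema=schema).parse(u"last:t AND somerepo")
    hits = s.search(q)
    print(hits[0]['raw_id'])   # 'abc123' - resume indexing after this changeset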
 

	
 
    def update_file_index(self):
 
-        log.debug(u'STARTING INCREMENTAL INDEXING UPDATE FOR EXTENSIONS %s '
+        log.debug('STARTING INCREMENTAL INDEXING UPDATE FOR EXTENSIONS %s '
 
                  'AND REPOS %s', INDEX_EXTENSIONS, ' and '.join(self.repo_paths))
 

	
 
        idx = open_dir(self.index_location, indexname=self.indexname)
 
        # The set of all paths in the index
 
        indexed_paths = set()
 
        # The set of all paths we need to re-index
 
        to_index = set()
 

	
 
        writer = idx.writer()
 
        writer_is_dirty = False
 
        try:
 
            with idx.reader() as reader:
 

	
 
                # Loop over the stored fields in the index
 
                for fields in reader.all_stored_fields():
 
                    indexed_path = fields['path']
 
                    indexed_repo_path = fields['repository']
 
                    indexed_paths.add(indexed_path)
 

	
 
                    if indexed_repo_path not in self.filtered_repo_update_paths:
 
                        continue
 

	
 
                    repo = self.repo_paths[indexed_repo_path]
 

	
 
                    try:
 
                        node = self.get_node(repo, indexed_path)
 
                        # Check if this file was changed since it was indexed
 
                        indexed_time = fields['modtime']
 
                        mtime = self.get_node_mtime(node)
 
                        if mtime > indexed_time:
 
                            # The file has changed, delete it and add it to
 
                            # the list of files to reindex
 
                            log.debug(
 
                                'adding to reindex list %s mtime: %s vs %s',
 
                                    indexed_path, mtime, indexed_time
 
                            )
 
                            writer.delete_by_term('fileid', indexed_path)
 
                            writer_is_dirty = True
 

	
 
                            to_index.add(indexed_path)
 
                    except (ChangesetError, NodeDoesNotExistError):
 
                        # This file was deleted since it was indexed
 
                        log.debug('removing from index %s', indexed_path)
 
                        writer.delete_by_term('path', indexed_path)
 
                        writer_is_dirty = True
 

	
 
            # Loop over the files in the filesystem
 
            # Assume we have a function that gathers the filenames of the
kallithea/lib/locale.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
import logging
 
import os
 
import sys
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 
def current_locale_is_valid():
 
    """Verify that things work when Dulwich passes unicode paths to the file system layer.
 

	
 
    Note: UTF-8 is preferred, but for example ISO-8859-1 or mbcs should also
 
    work under the right circumstances."""
 
    try:
 
-        u'\xe9'.encode(sys.getfilesystemencoding()) # Test using é (&eacute;)
+        '\xe9'.encode(sys.getfilesystemencoding()) # Test using é (&eacute;)
 
    except UnicodeEncodeError:
 
        log.error("Cannot encode Unicode paths to file system encoding %r", sys.getfilesystemencoding())
 
        for var in ['LC_ALL', 'LC_CTYPE', 'LANG']:
 
            if var in os.environ:
 
                val = os.environ[var]
 
                log.error("Note: Environment variable %s is %r - perhaps change it to some other value from 'locale -a', like 'C.UTF-8' or 'en_US.UTF-8'", var, val)
 
                break
 
        else:
 
            log.error("Note: No locale setting found in environment variables - perhaps set LC_CTYPE to some value from 'locale -a', like 'C.UTF-8' or 'en_US.UTF-8'")
 
        return False
 
    return True
 

	
 
def get_current_locale():
 
    """Return the current locale based on environment variables.
 
    There does not seem to be a good (and functional) way to get it via Python.
 
    """
 
    for var in ['LC_ALL', 'LC_CTYPE', 'LANG']:
 
        val = os.environ.get(var)
 
        if val:
 
            log.debug('Determined current locale via environment variable %s (%s)', var, val)
 
            return val
 
    return None
kallithea/lib/middleware/pygrack.py
 
@@ -120,97 +120,97 @@ class GitRepository(object):
 
        try:
 
            out = subprocessio.SubprocessIOChunker(cmd,
 
                starting_values=[ascii_bytes(packet_len + server_advert + '0000')]
 
            )
 
        except EnvironmentError as e:
 
            log.error(traceback.format_exc())
 
            raise exc.HTTPExpectationFailed()
 
        resp = Response()
 
        resp.content_type = 'application/x-%s-advertisement' % str(git_command)
 
        resp.charset = None
 
        resp.app_iter = out
 
        return resp
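
The advertisement assembled above follows Git's pkt-line framing: each packet carries a four-hex-digit length prefix that counts the prefix itself, and '0000' is the flush packet that terminates the ref advertisement. A tiny illustration (the helper name is ours):

def pkt_line(data):
    # four hex digits of total length (prefix included), then the payload
    return '%04x' % (len(data) + 4) + data

advert = pkt_line('# service=git-upload-pack\n') + '0000'
print(repr(advert))   # '001e# service=git-upload-pack\n0000'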
 

	
 
    def backend(self, req, environ):
 
        """
 
        WSGI Response producer for HTTP POST Git Smart HTTP requests.
 
        Reads commands and data from HTTP POST's body.
 
        returns an iterator obj with contents of git command's
 
        response to stdout
 
        """
 
        _git_path = kallithea.CONFIG.get('git_path', 'git')
 
        git_command = self._get_fixedpath(req.path_info)
 
        if git_command not in self.commands:
 
            log.debug('command %s not allowed', git_command)
 
            return exc.HTTPMethodNotAllowed()
 

	
 
        if 'CONTENT_LENGTH' in environ:
 
            inputstream = FileWrapper(environ['wsgi.input'],
 
                                      req.content_length)
 
        else:
 
            inputstream = environ['wsgi.input']
 

	
 
        gitenv = dict(os.environ)
 
        # forget all configs
 
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
 
        cmd = [_git_path, git_command[4:], '--stateless-rpc', self.content_path]
 
        log.debug('handling cmd %s', cmd)
 
        try:
 
            out = subprocessio.SubprocessIOChunker(
 
                cmd,
 
                inputstream=inputstream,
 
                env=gitenv,
 
                cwd=self.content_path,
 
            )
 
        except EnvironmentError as e:
 
            log.error(traceback.format_exc())
 
            raise exc.HTTPExpectationFailed()
 

	
 
-        if git_command in [u'git-receive-pack']:
+        if git_command in ['git-receive-pack']:
 
            # updating refs manually after each push.
 
            # Needed for pre-1.7.0.4 git clients using regular HTTP mode.
 
            from kallithea.lib.vcs import get_repo
 
            from dulwich.server import update_server_info
 
            repo = get_repo(self.content_path)
 
            if repo:
 
                update_server_info(repo._repo)
 

	
 
        resp = Response()
 
        resp.content_type = 'application/x-%s-result' % git_command.encode('utf-8')
 
        resp.charset = None
 
        resp.app_iter = out
 
        return resp
 

	
 
    def __call__(self, environ, start_response):
 
        req = Request(environ)
 
        _path = self._get_fixedpath(req.path_info)
 
        if _path.startswith('info/refs'):
 
            app = self.inforefs
 
        elif req.accept.acceptable_offers(self.valid_accepts):
 
            app = self.backend
 
        try:
 
            resp = app(req, environ)
 
        except exc.HTTPException as e:
 
            resp = e
 
            log.error(traceback.format_exc())
 
        except Exception as e:
 
            log.error(traceback.format_exc())
 
            resp = exc.HTTPInternalServerError()
 
        return resp(environ, start_response)
 

	
 

	
 
class GitDirectory(object):
 

	
 
    def __init__(self, repo_root, repo_name):
 
        repo_location = os.path.join(repo_root, repo_name)
 
        if not os.path.isdir(repo_location):
 
            raise OSError(repo_location)
 

	
 
        self.content_path = repo_location
 
        self.repo_name = repo_name
 
        self.repo_location = repo_location
 

	
 
    def __call__(self, environ, start_response):
 
        content_path = self.content_path
 
        try:
 
            app = GitRepository(self.repo_name, content_path)
 
        except (AssertionError, OSError):
kallithea/lib/utils.py
 
@@ -100,97 +100,97 @@ def fix_repo_id_name(path):
 
    provided path.
 
    """
 
    first, rest = path, ''
 
    if '/' in path:
 
        first, rest_ = path.split('/', 1)
 
        rest = '/' + rest_
 
    repo_id = _get_permanent_id(first)
 
    if repo_id is not None:
 
        repo = Repository.get(repo_id)
 
        if repo is not None:
 
            return repo.repo_name + rest
 
    return path
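A hedged sketch of the behaviour above (the permanent id '_42' and the name 'mygroup/myrepo' are made up): when the first path segment is a permanent repository id, it is rewritten to the current repository name, while other paths pass through unchanged:

    from kallithea.lib.utils import fix_repo_id_name

    # Illustrative only: assumes '_42' is a permanent-id segment that
    # _get_permanent_id resolves to a repository currently named 'mygroup/myrepo'.
    assert fix_repo_id_name('_42/changelog') == 'mygroup/myrepo/changelog'
    assert fix_repo_id_name('another/repo/changelog') == 'another/repo/changelog'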
 

	
 

	
 
def action_logger(user, action, repo, ipaddr='', commit=False):
 
    """
 
    Action logger for various actions made by users
 

	
 
    :param user: user that made this action, can be a unique username string or
 
        object containing user_id attribute
 
    :param action: action to log, should be one of the predefined unique actions for

        easy translations

    :param repo: string name of the repository, or an object containing repo_id,

        that the action was made on

    :param ipaddr: optional IP address from which the action was made
 

	
 
    """
 

	
 
    # if we don't get explicit IP address try to get one from registered user
 
    # in tmpl context var
 
    if not ipaddr:
 
        ipaddr = getattr(get_current_authuser(), 'ip_addr', '')
 

	
 
    if getattr(user, 'user_id', None):
 
        user_obj = User.get(user.user_id)
 
    elif isinstance(user, str):
 
        user_obj = User.get_by_username(user)
 
    else:
 
        raise Exception('You have to provide a user object or a username')
 

	
 
    if getattr(repo, 'repo_id', None):
 
        repo_obj = Repository.get(repo.repo_id)
 
        repo_name = repo_obj.repo_name
 
    elif isinstance(repo, str):
 
        repo_name = repo.lstrip('/')
 
        repo_obj = Repository.get_by_repo_name(repo_name)
 
    else:
 
        repo_obj = None
 
        repo_name = u''
 
        repo_name = ''
 

	
 
    user_log = UserLog()
 
    user_log.user_id = user_obj.user_id
 
    user_log.username = user_obj.username
 
    user_log.action = action
 

	
 
    user_log.repository = repo_obj
 
    user_log.repository_name = repo_name
 

	
 
    user_log.action_date = datetime.datetime.now()
 
    user_log.user_ip = ipaddr
 
    meta.Session().add(user_log)
 

	
 
    log.info('Logging action:%s on %s by user:%s ip:%s',
 
             action, repo, user_obj, ipaddr)
 
    if commit:
 
        meta.Session().commit()
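A hedged usage sketch (the user, repository and action names are made up, and a database session from the running application is assumed): both `user` and `repo` may be given as objects or as plain strings, and `commit=True` commits the log entry immediately:

    from kallithea.lib.utils import action_logger

    action_logger(user='admin',
                  action='user_created_repo',
                  repo='mygroup/myrepo',
                  ipaddr='10.0.0.1',
                  commit=True)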
 

	
 

	
 
def get_filesystem_repos(path):
 
    """
 
    Scans the given path for repos and returns (name, (type, path)) tuples
 

	
 
    :param path: path to scan for repositories
 
    :param recursive: recursive search and return names with subdirs in front
 
    """
 

	
 
    # remove ending slash for better results
 
    path = path.rstrip(os.sep)
 
    log.debug('now scanning in %s', path)
 

	
 
    def isdir(*n):
 
        return os.path.isdir(os.path.join(*n))
 

	
 
    for root, dirs, _files in os.walk(path):
 
        recurse_dirs = []
 
        for subdir in dirs:
 
            # skip removed repos
 
            if REMOVED_REPO_PAT.match(subdir):
 
                continue
 

	
 
            # skip .<something> dirs TODO: rly? then we should prevent creating them ...
 
            if subdir.startswith('.'):
 
                continue
 

	
 
            cur_path = os.path.join(root, subdir)
 
            if isdir(cur_path, '.git'):
 
                log.warning('ignoring non-bare Git repo: %s', cur_path)
 
@@ -383,97 +383,97 @@ def set_vcs_config(config):
 
    """
 
    Patch VCS config with some Kallithea specific stuff
 

	
 
    :param config: kallithea.CONFIG
 
    """
 
    settings.BACKENDS = {
 
        'hg': 'kallithea.lib.vcs.backends.hg.MercurialRepository',
 
        'git': 'kallithea.lib.vcs.backends.git.GitRepository',
 
    }
 

	
 
    settings.GIT_EXECUTABLE_PATH = config.get('git_path', 'git')
 
    settings.GIT_REV_FILTER = config.get('git_rev_filter', '--all').strip()
 
    settings.DEFAULT_ENCODINGS = aslist(config.get('default_encoding',
 
                                                        'utf-8'), sep=',')
 

	
 

	
 
def set_indexer_config(config):
 
    """
 
    Update Whoosh index mapping
 

	
 
    :param config: kallithea.CONFIG
 
    """
 
    log.debug('adding extra into INDEX_EXTENSIONS')
 
    kallithea.config.conf.INDEX_EXTENSIONS.extend(re.split(r'\s+', config.get('index.extensions', '')))
 

	
 
    log.debug('adding extra into INDEX_FILENAMES')
 
    kallithea.config.conf.INDEX_FILENAMES.extend(re.split(r'\s+', config.get('index.filenames', '')))
 

	
 

	
 
def map_groups(path):
 
    """
 
    Given a full path to a repository, create all nested groups that this
 
    repo is inside. This function creates parent-child relationships between
 
    groups and creates default perms for all new groups.
 

	
 
    :param path: full path to repository
 
    """
 
    from kallithea.model.repo_group import RepoGroupModel
 
    sa = meta.Session()
 
    groups = path.split(Repository.url_sep())
 
    parent = None
 
    group = None
 

	
 
    # last element is repo in nested groups structure
 
    groups = groups[:-1]
 
    rgm = RepoGroupModel()
 
    owner = User.get_first_admin()
 
    for lvl, group_name in enumerate(groups):
 
        group_name = u'/'.join(groups[:lvl] + [group_name])
 
        group_name = '/'.join(groups[:lvl] + [group_name])
 
        group = RepoGroup.get_by_group_name(group_name)
 
        desc = '%s group' % group_name
 

	
 
        # skip folders that are now removed repos
 
        if REMOVED_REPO_PAT.match(group_name):
 
            break
 

	
 
        if group is None:
 
            log.debug('creating group level: %s group_name: %s',
 
                      lvl, group_name)
 
            group = RepoGroup(group_name, parent)
 
            group.group_description = desc
 
            group.owner = owner
 
            sa.add(group)
 
            rgm._create_default_perms(group)
 
            sa.flush()
 

	
 
        parent = group
 
    return group
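A hedged usage sketch (the path is made up, and an application database session is assumed): for a repository at `projects/web/site`, the call ensures the `projects` and `projects/web` groups exist, creating any that are missing, and returns the innermost one:

    from kallithea.lib.utils import map_groups

    # The last path element ('site') is the repository itself and is skipped.
    innermost_group = map_groups('projects/web/site')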
 

	
 

	
 
def repo2db_mapper(initial_repo_dict, remove_obsolete=False,
 
                   install_git_hooks=False, user=None, overwrite_git_hooks=False):
 
    """
 
    maps all repos given in initial_repo_dict; non-existing repositories

    are created. If remove_obsolete is True it also checks for db entries

    that are not in initial_repo_dict and removes them.
 

	
 
    :param initial_repo_dict: mapping with repositories found by scanning methods
 
    :param remove_obsolete: check for obsolete entries in database
 
    :param install_git_hooks: if this is True, also check and install git hook
 
        for a repo if missing
 
    :param overwrite_git_hooks: if this is True, overwrite any existing git hooks
 
        that may be encountered (even if user-deployed)
 
    """
 
    from kallithea.model.repo import RepoModel
 
    from kallithea.model.scm import ScmModel
 
    sa = meta.Session()
 
    repo_model = RepoModel()
 
    if user is None:
 
        user = User.get_first_admin()
 
    added = []
 

	
 
    # creation defaults
 
    defs = Setting.get_default_repo_settings(strip_prefix=True)
 
    enable_statistics = defs.get('repo_enable_statistics')
 
    enable_downloads = defs.get('repo_enable_downloads')
 
    private = defs.get('repo_private')
kallithea/lib/utils2.py
 
@@ -194,102 +194,102 @@ def age(prevdate, show_short_version=Fal
 
        prevdate = prevdate.replace(microsecond=0)
 
    # Get date parts deltas
 
    from dateutil import relativedelta
 
    for part in order:
 
        d = relativedelta.relativedelta(now, prevdate)
 
        deltas[part] = getattr(d, part + 's')
 

	
 
    # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
 
    # not 1 hour, -59 minutes and -59 seconds)
 
    for num, length in [(5, 60), (4, 60), (3, 24)]:  # seconds, minutes, hours
 
        part = order[num]
 
        carry_part = order[num - 1]
 

	
 
        if deltas[part] < 0:
 
            deltas[part] += length
 
            deltas[carry_part] -= 1
 

	
 
    # Same thing for days except that the increment depends on the (variable)
 
    # number of days in the month
 
    month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
 
    if deltas['day'] < 0:
 
        if prevdate.month == 2 and (prevdate.year % 4 == 0 and
 
            (prevdate.year % 100 != 0 or prevdate.year % 400 == 0)
 
        ):
 
            deltas['day'] += 29
 
        else:
 
            deltas['day'] += month_lengths[prevdate.month - 1]
 

	
 
        deltas['month'] -= 1
 

	
 
    if deltas['month'] < 0:
 
        deltas['month'] += 12
 
        deltas['year'] -= 1
 

	
 
    # In short version, we want nicer handling of ages of more than a year
 
    if show_short_version:
 
        if deltas['year'] == 1:
 
            # ages between 1 and 2 years: show as months
 
            deltas['month'] += 12
 
            deltas['year'] = 0
 
        if deltas['year'] >= 2:
 
            # ages 2+ years: round
 
            if deltas['month'] > 6:
 
                deltas['year'] += 1
 
                deltas['month'] = 0
 

	
 
    # Format the result
 
    fmt_funcs = {
 
        'year': lambda d: ungettext(u'%d year', '%d years', d) % d,
 
        'month': lambda d: ungettext(u'%d month', '%d months', d) % d,
 
        'day': lambda d: ungettext(u'%d day', '%d days', d) % d,
 
        'hour': lambda d: ungettext(u'%d hour', '%d hours', d) % d,
 
        'minute': lambda d: ungettext(u'%d minute', '%d minutes', d) % d,
 
        'second': lambda d: ungettext(u'%d second', '%d seconds', d) % d,
 
        'year': lambda d: ungettext('%d year', '%d years', d) % d,
 
        'month': lambda d: ungettext('%d month', '%d months', d) % d,
 
        'day': lambda d: ungettext('%d day', '%d days', d) % d,
 
        'hour': lambda d: ungettext('%d hour', '%d hours', d) % d,
 
        'minute': lambda d: ungettext('%d minute', '%d minutes', d) % d,
 
        'second': lambda d: ungettext('%d second', '%d seconds', d) % d,
 
    }
 

	
 
    for i, part in enumerate(order):
 
        value = deltas[part]
 
        if value == 0:
 
            continue
 

	
 
        if i < 5:
 
            sub_part = order[i + 1]
 
            sub_value = deltas[sub_part]
 
        else:
 
            sub_value = 0
 

	
 
        if sub_value == 0 or show_short_version:
 
            if future:
 
                return _('in %s') % fmt_funcs[part](value)
 
            else:
 
                return _('%s ago') % fmt_funcs[part](value)
 
        if future:
 
            return _('in %s and %s') % (fmt_funcs[part](value),
 
                fmt_funcs[sub_part](sub_value))
 
        else:
 
            return _('%s and %s ago') % (fmt_funcs[part](value),
 
                fmt_funcs[sub_part](sub_value))
 

	
 
    return _('just now')
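A small standalone sketch of the short-version rounding applied above, extracted from the year/month handling (no translation context needed):

    def round_short(years, months):
        # Same rules as the show_short_version branch above.
        if years == 1:                 # between 1 and 2 years: report months
            months += 12
            years = 0
        if years >= 2 and months > 6:  # 2+ years: round up to whole years
            years += 1
            months = 0
        return years, months

    assert round_short(1, 5) == (0, 17)   # shown as "17 months ago"
    assert round_short(2, 7) == (3, 0)    # shown as "3 years ago"
    assert round_short(2, 3) == (2, 3)    # shown as "2 years ago"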
 

	
 

	
 
def uri_filter(uri):
 
    """
 
    Removes user:password from given url string
 

	
 
    :param uri:
 
    :rtype: str
 
    :returns: filtered list of strings
 
    """
 
    if not uri:
 
        return []
 

	
 
    proto = ''
 

	
 
    for pat in ('https://', 'http://', 'git://'):
 
        if uri.startswith(pat):
 
            uri = uri[len(pat):]
 
            proto = pat
 
            break
 

	
 
    # remove passwords and username
kallithea/lib/vcs/backends/base.py
 
@@ -981,77 +981,77 @@ class BaseInMemoryChangeset(object):
 
        """
 
        raise NotImplementedError
 

	
 

	
 
class EmptyChangeset(BaseChangeset):
 
    """
 
    A dummy empty changeset. It's possible to pass a hash when creating

    an EmptyChangeset.
 
    """
 

	
 
    def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
 
                 alias=None, revision=-1, message='', author='', date=None):
 
        self._empty_cs = cs
 
        self.revision = revision
 
        self.message = message
 
        self.author = author
 
        self.date = date or datetime.datetime.fromtimestamp(0)
 
        self.repository = repo
 
        self.requested_revision = requested_revision
 
        self.alias = alias
 

	
 
    @LazyProperty
 
    def raw_id(self):
 
        """
 
        Returns raw string identifying this changeset, useful for web
 
        representation.
 
        """
 

	
 
        return self._empty_cs
 

	
 
    @LazyProperty
 
    def branch(self):
 
        from kallithea.lib.vcs.backends import get_backend
 
        return get_backend(self.alias).DEFAULT_BRANCH_NAME
 

	
 
    @LazyProperty
 
    def branches(self):
 
        from kallithea.lib.vcs.backends import get_backend
 
        return [get_backend(self.alias).DEFAULT_BRANCH_NAME]
 

	
 
    @LazyProperty
 
    def short_id(self):
 
        return self.raw_id[:12]
 

	
 
    def get_file_changeset(self, path):
 
        return self
 

	
 
    def get_file_content(self, path):
 
        return u''
 
        return ''
 

	
 
    def get_file_size(self, path):
 
        return 0
 

	
 

	
 
class CollectionGenerator(object):
 

	
 
    def __init__(self, repo, revs):
 
        self.repo = repo
 
        self.revs = revs
 

	
 
    def __len__(self):
 
        return len(self.revs)
 

	
 
    def __iter__(self):
 
        for rev in self.revs:
 
            yield self.repo.get_changeset(rev)
 

	
 
    def __getitem__(self, what):
 
        """Return either a single element by index, or a sliced collection."""
 
        if isinstance(what, slice):
 
            return CollectionGenerator(self.repo, self.revs[what])
 
        else:
 
            # single item
 
            return self.repo.get_changeset(self.revs[what])
 

	
 
    def __repr__(self):
 
        return '<CollectionGenerator[len:%s]>' % (len(self))
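A hedged sketch of how the lazy collection above behaves (the fake repository is a made-up stand-in for a real backend instance):

    from kallithea.lib.vcs.backends.base import CollectionGenerator

    class FakeRepo(object):
        """Hypothetical stand-in for a backend repository."""
        def get_changeset(self, rev):
            return 'changeset-%s' % rev

    collection = CollectionGenerator(FakeRepo(), revs=[0, 1, 2, 3])
    assert len(collection) == 4                                     # no changesets created yet
    assert list(collection[:2]) == ['changeset-0', 'changeset-1']   # slice stays lazy
    assert collection[-1] == 'changeset-3'                          # single item lookup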
kallithea/lib/vcs/backends/git/repository.py
 
@@ -306,97 +306,97 @@ class GitRepository(BaseRepository):
 
        changeset at the given ``revision``.
 
        """
 
        return self._get_revision(ref_name)
 

	
 
    def _get_archives(self, archive_name='tip'):
 

	
 
        for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
 
            yield {"type": i[0], "extension": i[1], "node": archive_name}
 

	
 
    def _get_url(self, url):
 
        """
 
        Returns the normalized URL. If no scheme is given, falls back to the

        filesystem (``file:///``) scheme.
 
        """
 
        if url != 'default' and '://' not in url:
 
            url = ':///'.join(('file', url))
 
        return url
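A small sketch of the normalization above (the URLs are made up):

    def normalize(url):
        # Same rule as _get_url: bare filesystem paths gain the file:// scheme.
        if url != 'default' and '://' not in url:
            url = ':///'.join(('file', url))
        return url

    assert normalize('https://example.com/repo.git') == 'https://example.com/repo.git'
    assert normalize('/srv/repos/example.git').startswith('file://')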
 

	
 
    @LazyProperty
 
    def name(self):
 
        return os.path.basename(self.path)
 

	
 
    @LazyProperty
 
    def last_change(self):
 
        """
 
        Returns last change made on this repository as datetime object
 
        """
 
        return date_fromtimestamp(self._get_mtime(), makedate()[1])
 

	
 
    def _get_mtime(self):
 
        try:
 
            return time.mktime(self.get_changeset().date.timetuple())
 
        except RepositoryError:
 
            idx_loc = '' if self.bare else '.git'
 
            # fallback to filesystem
 
            in_path = os.path.join(self.path, idx_loc, "index")
 
            he_path = os.path.join(self.path, idx_loc, "HEAD")
 
            if os.path.exists(in_path):
 
                return os.stat(in_path).st_mtime
 
            else:
 
                return os.stat(he_path).st_mtime
 

	
 
    @LazyProperty
 
    def description(self):
 
        return safe_str(self._repo.get_description() or b'unknown')
 

	
 
    @LazyProperty
 
    def contact(self):
 
        undefined_contact = u'Unknown'
 
        undefined_contact = 'Unknown'
 
        return undefined_contact
 

	
 
    @property
 
    def branches(self):
 
        if not self.revisions:
 
            return {}
 
        sortkey = lambda ctx: ctx[0]
 
        _branches = [(safe_str(key), ascii_str(sha))
 
                     for key, (sha, type_) in self._parsed_refs.items() if type_ == b'H']
 
        return OrderedDict(sorted(_branches, key=sortkey, reverse=False))
 

	
 
    @LazyProperty
 
    def closed_branches(self):
 
        return {}
 

	
 
    @LazyProperty
 
    def tags(self):
 
        return self._get_tags()
 

	
 
    def _get_tags(self):
 
        if not self.revisions:
 
            return {}
 

	
 
        sortkey = lambda ctx: ctx[0]
 
        _tags = [(safe_str(key), ascii_str(sha))
 
                 for key, (sha, type_) in self._parsed_refs.items() if type_ == b'T']
 
        return OrderedDict(sorted(_tags, key=sortkey, reverse=True))
 

	
 
    def tag(self, name, user, revision=None, message=None, date=None,
 
            **kwargs):
 
        """
 
        Creates and returns a tag for the given ``revision``.
 

	
 
        :param name: name for new tag
 
        :param user: full username, e.g.: "Joe Doe <joe.doe@example.com>"
 
        :param revision: changeset id for which new tag would be created
 
        :param message: message of the tag's commit
 
        :param date: date of tag's commit
 

	
 
        :raises TagAlreadyExistError: if tag with same name already exists
 
        """
 
        if name in self.tags:
 
            raise TagAlreadyExistError("Tag %s already exists" % name)
 
        changeset = self.get_changeset(revision)
 
        message = message or "Added tag %s for commit %s" % (name,
 
            changeset.raw_id)
 
        self._repo.refs[b"refs/tags/%s" % safe_bytes(name)] = changeset._commit.id
 

	
kallithea/lib/vcs/nodes.py
 
# -*- coding: utf-8 -*-
 
"""
 
    vcs.nodes
 
    ~~~~~~~~~
 

	
 
    Module holding everything related to vcs nodes.
 

	
 
    :created_on: Apr 8, 2010
 
    :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
 
"""
 

	
 
import functools
 
import mimetypes
 
import posixpath
 
import stat
 

	
 
from kallithea.lib.vcs.backends.base import EmptyChangeset
 
from kallithea.lib.vcs.exceptions import NodeError, RemovedFileNodeError
 
from kallithea.lib.vcs.utils import safe_bytes, safe_str
 
from kallithea.lib.vcs.utils.lazy import LazyProperty
 

	
 

	
 
class NodeKind:
 
    SUBMODULE = -1
 
    DIR = 1
 
    FILE = 2
 

	
 

	
 
class NodeState:
 
    ADDED = u'added'
 
    CHANGED = u'changed'
 
    NOT_CHANGED = u'not changed'
 
    REMOVED = u'removed'
 
    ADDED = 'added'
 
    CHANGED = 'changed'
 
    NOT_CHANGED = 'not changed'
 
    REMOVED = 'removed'
 

	
 

	
 
class NodeGeneratorBase(object):
 
    """
 
    Base class for removed, added and changed filenodes; it's a lazy generator

    class that will create filenodes only on iteration or call.
 

	
 
    The len method doesn't need to create filenodes at all
 
    """
 

	
 
    def __init__(self, current_paths, cs):
 
        self.cs = cs
 
        self.current_paths = current_paths
 

	
 
    def __getitem__(self, key):
 
        assert isinstance(key, slice), key
 
        for p in self.current_paths[key]:
 
            yield self.cs.get_node(p)
 

	
 
    def __len__(self):
 
        return len(self.current_paths)
 

	
 
    def __iter__(self):
 
        for p in self.current_paths:
 
            yield self.cs.get_node(p)
 

	
 

	
 
class AddedFileNodesGenerator(NodeGeneratorBase):
 
    """
 
    Class holding Added files for current changeset
 
    """
 
    pass
 

	
 

	
 
class ChangedFileNodesGenerator(NodeGeneratorBase):
 
    """
 
    Class holding Changed files for current changeset
 
    """
 
    pass
 

	
 

	
 
class RemovedFileNodesGenerator(NodeGeneratorBase):
 
    """
 
    Class holding removed files for current changeset
 
    """
 
    def __iter__(self):
 
        for p in self.current_paths:
 
            yield RemovedFileNode(path=p)
 
@@ -561,49 +561,49 @@ class DirNode(Node):
 
        return size
 

	
 
    def __repr__(self):
 
        return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
 
                                 getattr(self.changeset, 'short_id', ''))
 

	
 

	
 
class RootNode(DirNode):
 
    """
 
    DirNode being the root node of the repository.
 
    """
 

	
 
    def __init__(self, nodes=(), changeset=None):
 
        super(RootNode, self).__init__(path='', nodes=nodes,
 
            changeset=changeset)
 

	
 
    def __repr__(self):
 
        return '<%s>' % self.__class__.__name__
 

	
 

	
 
class SubModuleNode(Node):
 
    """
 
    represents a SubModule of Git or SubRepo of Mercurial
 
    """
 
    is_binary = False
 
    size = 0
 

	
 
    def __init__(self, name, url, changeset=None, alias=None):
 
        # Note: Doesn't call Node.__init__!
 
        self.path = name
 
        self.kind = NodeKind.SUBMODULE
 
        self.alias = alias
 
        # we have to use EmptyChangeset here since this can point to svn/git/hg

        # submodules we cannot get from the repository
 
        self.changeset = EmptyChangeset(changeset, alias=alias)
 
        self.url = url
 

	
 
    def __repr__(self):
 
        return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
 
                                 getattr(self.changeset, 'short_id', ''))
 

	
 
    @LazyProperty
 
    def name(self):
 
        """
 
        Returns the name of the node, so if it is a path,

        only the last part is returned.
 
        """
 
        org = self.path.rstrip('/').rsplit('/', 1)[-1]
 
        return u'%s @ %s' % (org, self.changeset.short_id)
 
        return '%s @ %s' % (org, self.changeset.short_id)
kallithea/model/db.py
 
@@ -914,99 +914,99 @@ class UserGroupMember(Base, BaseDbModel)
 
class RepositoryField(Base, BaseDbModel):
 
    __tablename__ = 'repositories_fields'
 
    __table_args__ = (
 
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
 
        _table_args_default_dict,
 
    )
 

	
 
    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields
 

	
 
    repo_field_id = Column(Integer(), primary_key=True)
 
    repository_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    field_key = Column(String(250), nullable=False)
 
    field_label = Column(String(1024), nullable=False)
 
    field_value = Column(String(10000), nullable=False)
 
    field_desc = Column(String(1024), nullable=False)
 
    field_type = Column(String(255), nullable=False)
 
    created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    repository = relationship('Repository')
 

	
 
    @property
 
    def field_key_prefixed(self):
 
        return 'ex_%s' % self.field_key
 

	
 
    @classmethod
 
    def un_prefix_key(cls, key):
 
        if key.startswith(cls.PREFIX):
 
            return key[len(cls.PREFIX):]
 
        return key
 

	
 
    @classmethod
 
    def get_by_key_name(cls, key, repo):
 
        row = cls.query() \
 
                .filter(cls.repository == repo) \
 
                .filter(cls.field_key == key).scalar()
 
        return row
 

	
 

	
 
class Repository(Base, BaseDbModel):
 
    __tablename__ = 'repositories'
 
    __table_args__ = (
 
        Index('r_repo_name_idx', 'repo_name'),
 
        _table_args_default_dict,
 
    )
 

	
 
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
 
    DEFAULT_CLONE_SSH = 'ssh://{system_user}@{hostname}/{repo}'
 

	
 
    STATE_CREATED = u'repo_state_created'
 
    STATE_PENDING = u'repo_state_pending'
 
    STATE_ERROR = u'repo_state_error'
 
    STATE_CREATED = 'repo_state_created'
 
    STATE_PENDING = 'repo_state_pending'
 
    STATE_ERROR = 'repo_state_error'
 

	
 
    repo_id = Column(Integer(), primary_key=True)
 
    repo_name = Column(Unicode(255), nullable=False, unique=True)
 
    repo_state = Column(String(255), nullable=False)
 

	
 
    clone_uri = Column(String(255), nullable=True) # FIXME: not nullable?
 
    repo_type = Column(String(255), nullable=False) # 'hg' or 'git'
 
    owner_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
 
    private = Column(Boolean(), nullable=False)
 
    enable_statistics = Column("statistics", Boolean(), nullable=False, default=True)
 
    enable_downloads = Column("downloads", Boolean(), nullable=False, default=True)
 
    description = Column(Unicode(10000), nullable=False)
 
    created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    updated_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    _landing_revision = Column("landing_revision", String(255), nullable=False)
 
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) # JSON data # FIXME: not nullable?
 

	
 
    fork_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=True)
 
    group_id = Column(Integer(), ForeignKey('groups.group_id'), nullable=True)
 

	
 
    owner = relationship('User')
 
    fork = relationship('Repository', remote_side=repo_id)
 
    group = relationship('RepoGroup')
 
    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
 
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
 
    stats = relationship('Statistics', cascade='all', uselist=False)
 

	
 
    followers = relationship('UserFollowing',
 
                             primaryjoin='UserFollowing.follows_repository_id==Repository.repo_id',
 
                             cascade='all')
 
    extra_fields = relationship('RepositoryField',
 
                                cascade="all, delete-orphan")
 

	
 
    logs = relationship('UserLog')
 
    comments = relationship('ChangesetComment', cascade="all, delete-orphan")
 

	
 
    pull_requests_org = relationship('PullRequest',
 
                    primaryjoin='PullRequest.org_repo_id==Repository.repo_id',
 
                    cascade="all, delete-orphan")
 

	
 
    pull_requests_other = relationship('PullRequest',
 
                    primaryjoin='PullRequest.other_repo_id==Repository.repo_id',
 
                    cascade="all, delete-orphan")
 

	
 
    def __repr__(self):
 
        return "<%s %s: %r>" % (self.__class__.__name__,
 
                                  self.repo_id, self.repo_name)
 

	
 
@@ -1445,97 +1445,97 @@ class Repository(Base, BaseDbModel):
 

	
 

	
 
class RepoGroup(Base, BaseDbModel):
 
    __tablename__ = 'groups'
 
    __table_args__ = (
 
        _table_args_default_dict,
 
    )
 

	
 
    SEP = ' &raquo; '
 

	
 
    group_id = Column(Integer(), primary_key=True)
 
    group_name = Column(Unicode(255), nullable=False, unique=True) # full path
 
    parent_group_id = Column('group_parent_id', Integer(), ForeignKey('groups.group_id'), nullable=True)
 
    group_description = Column(Unicode(10000), nullable=False)
 
    owner_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
 
    created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
 
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
 
    parent_group = relationship('RepoGroup', remote_side=group_id)
 
    owner = relationship('User')
 

	
 
    @classmethod
 
    def query(cls, sorted=False):
 
        """Add RepoGroup-specific helpers for common query constructs.
 

	
 
        sorted: if True, apply the default ordering (name, case insensitive).
 
        """
 
        q = super(RepoGroup, cls).query()
 

	
 
        if sorted:
 
            q = q.order_by(sqlalchemy.func.lower(RepoGroup.group_name))
 

	
 
        return q
 

	
 
    def __init__(self, group_name='', parent_group=None):
 
        self.group_name = group_name
 
        self.parent_group = parent_group
 

	
 
    def __repr__(self):
 
        return "<%s %s: %s>" % (self.__class__.__name__,
 
                                self.group_id, self.group_name)
 

	
 
    @classmethod
 
    def _generate_choice(cls, repo_group):
 
        """Return tuple with group_id and name as html literal"""
 
        from webhelpers2.html import literal
 
        if repo_group is None:
 
            return (-1, u'-- %s --' % _('top level'))
 
            return (-1, '-- %s --' % _('top level'))
 
        return repo_group.group_id, literal(cls.SEP.join(repo_group.full_path_splitted))
 

	
 
    @classmethod
 
    def groups_choices(cls, groups):
 
        """Return tuples with group_id and name as html literal."""
 
        return sorted((cls._generate_choice(g) for g in groups),
 
                      key=lambda c: c[1].split(cls.SEP))
 

	
 
    @classmethod
 
    def url_sep(cls):
 
        return URL_SEP
 

	
 
    @classmethod
 
    def guess_instance(cls, value):
 
        return super(RepoGroup, cls).guess_instance(value, RepoGroup.get_by_group_name)
 

	
 
    @classmethod
 
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
 
        group_name = group_name.rstrip('/')
 
        if case_insensitive:
 
            gr = cls.query() \
 
                .filter(sqlalchemy.func.lower(cls.group_name) == sqlalchemy.func.lower(group_name))
 
        else:
 
            gr = cls.query() \
 
                .filter(cls.group_name == group_name)
 
        if cache:
 
            gr = gr.options(FromCache(
 
                            "sql_cache_short",
 
                            "get_group_%s" % _hash_key(group_name)
 
                            )
 
            )
 
        return gr.scalar()
 

	
 
    @property
 
    def parents(self):
 
        groups = []
 
        group = self.parent_group
 
        while group is not None:
 
            groups.append(group)
 
            group = group.parent_group
 
            assert group not in groups, group # avoid recursion on bad db content
 
        groups.reverse()
 
        return groups
 

	
 
    @property
 
    def children(self):
 
        return RepoGroup.query().filter(RepoGroup.parent_group == self)
 

	
 
@@ -2213,98 +2213,98 @@ class ChangesetStatus(Base, BaseDbModel)
 
    STATUSES_DICT = dict(STATUSES)
 

	
 
    changeset_status_id = Column(Integer(), primary_key=True)
 
    repo_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
 
    revision = Column(String(40), nullable=True)
 
    status = Column(String(128), nullable=False, default=DEFAULT)
 
    comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
 
    modified_at = Column(DateTime(), nullable=False, default=datetime.datetime.now)
 
    version = Column(Integer(), nullable=False, default=0)
 
    pull_request_id = Column(Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
 

	
 
    author = relationship('User')
 
    repo = relationship('Repository')
 
    comment = relationship('ChangesetComment')
 
    pull_request = relationship('PullRequest')
 

	
 
    def __repr__(self):
 
        return "<%s %r by %r>" % (
 
            self.__class__.__name__,
 
            self.status, self.author
 
        )
 

	
 
    @classmethod
 
    def get_status_lbl(cls, value):
 
        return cls.STATUSES_DICT.get(value)
 

	
 
    @property
 
    def status_lbl(self):
 
        return ChangesetStatus.get_status_lbl(self.status)
 

	
 
    def __json__(self):
 
        return dict(
 
            status=self.status,
 
            modified_at=self.modified_at.replace(microsecond=0),
 
            reviewer=self.author.username,
 
            )
 

	
 

	
 
class PullRequest(Base, BaseDbModel):
 
    __tablename__ = 'pull_requests'
 
    __table_args__ = (
 
        Index('pr_org_repo_id_idx', 'org_repo_id'),
 
        Index('pr_other_repo_id_idx', 'other_repo_id'),
 
        _table_args_default_dict,
 
    )
 

	
 
    # values for .status
 
    STATUS_NEW = u'new'
 
    STATUS_CLOSED = u'closed'
 
    STATUS_NEW = 'new'
 
    STATUS_CLOSED = 'closed'
 

	
 
    pull_request_id = Column(Integer(), primary_key=True)
 
    title = Column(Unicode(255), nullable=False)
 
    description = Column(UnicodeText(), nullable=False)
 
    status = Column(Unicode(255), nullable=False, default=STATUS_NEW) # only for closedness, not approve/reject/etc
 
    created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    updated_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    owner_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
 
    _revisions = Column('revisions', UnicodeText(), nullable=False)
 
    org_repo_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    org_ref = Column(Unicode(255), nullable=False)
 
    other_repo_id = Column(Integer(), ForeignKey('repositories.repo_id'), nullable=False)
 
    other_ref = Column(Unicode(255), nullable=False)
 

	
 
    @hybrid_property
 
    def revisions(self):
 
        return self._revisions.split(':')
 

	
 
    @revisions.setter
 
    def revisions(self, val):
 
        self._revisions = ':'.join(val)
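A small sketch of the colon-joined storage used by the hybrid property above (the revision hashes are made up; no database session is needed just to exercise the attribute):

    from kallithea.model.db import PullRequest

    pr = PullRequest()
    pr.revisions = ['deadbeef01', 'deadbeef02']          # setter joins into one text column
    assert pr._revisions == 'deadbeef01:deadbeef02'
    assert pr.revisions == ['deadbeef01', 'deadbeef02']  # getter splits it back out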
 

	
 
    @property
 
    def org_ref_parts(self):
 
        return self.org_ref.split(':')
 

	
 
    @property
 
    def other_ref_parts(self):
 
        return self.other_ref.split(':')
 

	
 
    owner = relationship('User')
 
    reviewers = relationship('PullRequestReviewer',
 
                             cascade="all, delete-orphan")
 
    org_repo = relationship('Repository', primaryjoin='PullRequest.org_repo_id==Repository.repo_id')
 
    other_repo = relationship('Repository', primaryjoin='PullRequest.other_repo_id==Repository.repo_id')
 
    statuses = relationship('ChangesetStatus', order_by='ChangesetStatus.changeset_status_id')
 
    comments = relationship('ChangesetComment', order_by='ChangesetComment.comment_id',
 
                             cascade="all, delete-orphan")
 

	
 
    @classmethod
 
    def query(cls, reviewer_id=None, include_closed=True, sorted=False):
 
        """Add PullRequest-specific helpers for common query constructs.
 

	
 
        reviewer_id: only PRs with the specified user added as reviewer.
 

	
 
        include_closed: if False, do not include closed PRs.
 

	
 
        sorted: if True, apply the default ordering (newest first).
 
@@ -2383,99 +2383,99 @@ class PullRequest(Base, BaseDbModel):
 
            s = '/_/' + self.title
 
        kwargs['extra'] = urlreadable(s)
 
        if canonical:
 
            return h.canonical_url('pullrequest_show', repo_name=self.other_repo.repo_name,
 
                                   pull_request_id=self.pull_request_id, **kwargs)
 
        return h.url('pullrequest_show', repo_name=self.other_repo.repo_name,
 
                     pull_request_id=self.pull_request_id, **kwargs)
 

	
 

	
 
class PullRequestReviewer(Base, BaseDbModel):
 
    __tablename__ = 'pull_request_reviewers'
 
    __table_args__ = (
 
        Index('pull_request_reviewers_user_id_idx', 'user_id'),
 
        _table_args_default_dict,
 
    )
 

	
 
    def __init__(self, user=None, pull_request=None):
 
        self.user = user
 
        self.pull_request = pull_request
 

	
 
    pull_request_reviewers_id = Column('pull_requests_reviewers_id', Integer(), primary_key=True)
 
    pull_request_id = Column(Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
 
    user_id = Column(Integer(), ForeignKey('users.user_id'), nullable=False)
 

	
 
    user = relationship('User')
 
    pull_request = relationship('PullRequest')
 

	
 
    def __json__(self):
 
        return dict(
 
            username=self.user.username if self.user else None,
 
        )
 

	
 

	
 
class Notification(object):
 
    __tablename__ = 'notifications'
 

	
 
class UserNotification(object):
 
    __tablename__ = 'user_to_notification'
 

	
 

	
 
class Gist(Base, BaseDbModel):
 
    __tablename__ = 'gists'
 
    __table_args__ = (
 
        Index('g_gist_access_id_idx', 'gist_access_id'),
 
        Index('g_created_on_idx', 'created_on'),
 
        _table_args_default_dict,
 
    )
 

	
 
    GIST_PUBLIC = u'public'
 
    GIST_PRIVATE = u'private'
 
    DEFAULT_FILENAME = u'gistfile1.txt'
 
    GIST_PUBLIC = 'public'
 
    GIST_PRIVATE = 'private'
 
    DEFAULT_FILENAME = 'gistfile1.txt'
 

	
 
    gist_id = Column(Integer(), primary_key=True)
 
    gist_access_id = Column(Unicode(250), nullable=False)
 
    gist_description = Column(UnicodeText(), nullable=False)
 
    owner_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
 
    gist_expires = Column(Float(53), nullable=False)
 
    gist_type = Column(Unicode(128), nullable=False)
 
    created_on = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 
    modified_at = Column(DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
 

	
 
    owner = relationship('User')
 

	
 
    @hybrid_property
 
    def is_expired(self):
 
        return (self.gist_expires != -1) & (time.time() > self.gist_expires)
 

	
 
    def __repr__(self):
 
        return "<%s %s %s>" % (
 
            self.__class__.__name__,
 
            self.gist_type, self.gist_access_id)
 

	
 
    @classmethod
 
    def guess_instance(cls, value):
 
        return super(Gist, cls).guess_instance(value, Gist.get_by_access_id)
 

	
 
    @classmethod
 
    def get_or_404(cls, id_):
 
        res = cls.query().filter(cls.gist_access_id == id_).scalar()
 
        if res is None:
 
            raise HTTPNotFound
 
        return res
 

	
 
    @classmethod
 
    def get_by_access_id(cls, gist_access_id):
 
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
 

	
 
    def gist_url(self):
 
        alias_url = kallithea.CONFIG.get('gist_alias_url')
 
        if alias_url:
 
            return alias_url.replace('{gistid}', self.gist_access_id)
 

	
 
        import kallithea.lib.helpers as h
 
        return h.canonical_url('gist', gist_id=self.gist_access_id)
 

	
 
    @classmethod
 
    def base_path(cls):
 
        """
 
        Returns base path where all gists are stored
kallithea/model/forms.py
 
@@ -513,56 +513,56 @@ def UserExtraEmailForm():
 
    return _UserExtraEmailForm
 

	
 

	
 
def UserExtraIpForm():
 
    class _UserExtraIpForm(formencode.Schema):
 
        ip = v.ValidIp()(not_empty=True)
 
    return _UserExtraIpForm
 

	
 

	
 
def PullRequestForm(repo_id):
 
    class _PullRequestForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = True
 

	
 
        org_repo = v.UnicodeString(strip=True, required=True)
 
        org_ref = v.UnicodeString(strip=True, required=True)
 
        other_repo = v.UnicodeString(strip=True, required=True)
 
        other_ref = v.UnicodeString(strip=True, required=True)
 

	
 
        pullrequest_title = v.UnicodeString(strip=True, required=True)
 
        pullrequest_desc = v.UnicodeString(strip=True, required=False)
 

	
 
    return _PullRequestForm
 

	
 

	
 
def PullRequestPostForm():
 
    class _PullRequestPostForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = True
 

	
 
        pullrequest_title = v.UnicodeString(strip=True, required=True)
 
        pullrequest_desc = v.UnicodeString(strip=True, required=False)
 
        org_review_members = v.Set()
 
        review_members = v.Set()
 
        updaterev = v.UnicodeString(strip=True, required=False, if_missing=None)
 
        owner = All(v.UnicodeString(strip=True, required=True),
 
                    v.ValidRepoUser())
 

	
 
    return _PullRequestPostForm
 

	
 

	
 
def GistForm(lifetime_options):
 
    class _GistForm(formencode.Schema):
 
        allow_extra_fields = True
 
        filter_extra_fields = True
 

	
 
        filename = All(v.BasePath()(),
 
                       v.UnicodeString(strip=True, required=False))
 
        description = v.UnicodeString(required=False, if_missing=u'')
 
        description = v.UnicodeString(required=False, if_missing='')
 
        lifetime = v.OneOf(lifetime_options)
 
        mimetype = v.UnicodeString(required=False, if_missing=None)
 
        content = v.UnicodeString(required=True, not_empty=True)
 
        public = v.UnicodeString(required=False, if_missing=u'')
 
        private = v.UnicodeString(required=False, if_missing=u'')
 
        public = v.UnicodeString(required=False, if_missing='')
 
        private = v.UnicodeString(required=False, if_missing='')
 

	
 
    return _GistForm
kallithea/model/gist.py
 
@@ -5,97 +5,97 @@
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.gist
 
~~~~~~~~~~~~~~~~~~~~
 

	
 
gist model for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: May 9, 2013
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import logging
 
import os
 
import random
 
import shutil
 
import time
 
import traceback
 

	
 
from kallithea.lib import ext_json
 
from kallithea.lib.utils2 import AttributeDict, ascii_bytes, safe_int, time_to_datetime
 
from kallithea.model.db import Gist, Session, User
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.scm import ScmModel
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 
GIST_STORE_LOC = '.rc_gist_store'
 
GIST_METADATA_FILE = '.rc_gist_metadata'
 

	
 

	
 
def make_gist_access_id():
 
    """Generate a random, URL safe, almost certainly unique gist identifier."""
 
    rnd = random.SystemRandom() # use cryptographically secure system PRNG
 
    alphabet = '23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz'
 
    length = 20
 
    return u''.join(rnd.choice(alphabet) for _ in range(length))
 
    return ''.join(rnd.choice(alphabet) for _ in range(length))
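For context (not part of the changeset): with this alphabet and a length of 20, the identifier space is on the order of 10^35 values, so collisions are practically impossible. A quick sketch:

    import math

    alphabet = '23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz'
    print(20 * math.log2(len(alphabet)))   # roughly 116 bits of entropy per gist id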
 

	
 

	
 
class GistModel(object):
 

	
 
    def __delete_gist(self, gist):
 
        """
 
        removes gist from filesystem
 

	
 
        :param gist: gist object
 
        """
 
        root_path = RepoModel().repos_path
 
        rm_path = os.path.join(root_path, GIST_STORE_LOC, gist.gist_access_id)
 
        log.info("Removing %s", rm_path)
 
        shutil.rmtree(rm_path)
 

	
 
    def _store_metadata(self, repo, gist_id, gist_access_id, user_id, gist_type,
 
                        gist_expires):
 
        """
 
        store metadata inside the gist, this can be later used for imports
 
        or gist identification
 
        """
 
        metadata = {
 
            'metadata_version': '1',
 
            'gist_db_id': gist_id,
 
            'gist_access_id': gist_access_id,
 
            'gist_owner_id': user_id,
 
            'gist_type': gist_type,
 
            'gist_expires': gist_expires,
 
            'gist_updated': time.time(),
 
        }
 
        with open(os.path.join(repo.path, '.hg', GIST_METADATA_FILE), 'wb') as f:
 
            f.write(ascii_bytes(ext_json.dumps(metadata)))
 

	
 
    def get_gist(self, gist):
 
        return Gist.guess_instance(gist)
 

	
 
    def get_gist_files(self, gist_access_id, revision=None):
 
        """
 
        Get files for given gist
 

	
 
        :param gist_access_id:
 
        """
 
        repo = Gist.get_by_access_id(gist_access_id)
 
        cs = repo.scm_instance.get_changeset(revision)
 
        return cs, [n for n in cs.get_node('/')]
 

	
 
    def create(self, description, owner, ip_addr, gist_mapping,
 
               gist_type=Gist.GIST_PUBLIC, lifetime=-1):
kallithea/model/notification.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.model.notification
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Model for notifications
 

	
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Nov 20, 2011
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import datetime
 
import logging
 

	
 
from tg import app_globals
 
from tg import tmpl_context as c
 
from tg.i18n import ugettext as _
 

	
 
import kallithea
 
from kallithea.lib import helpers as h
 
from kallithea.model.db import User
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 

	
 
class NotificationModel(object):
 

	
 
    TYPE_CHANGESET_COMMENT = u'cs_comment'
 
    TYPE_MESSAGE = u'message'
 
    TYPE_MENTION = u'mention' # not used
 
    TYPE_REGISTRATION = u'registration'
 
    TYPE_PULL_REQUEST = u'pull_request'
 
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
 
    TYPE_CHANGESET_COMMENT = 'cs_comment'
 
    TYPE_MESSAGE = 'message'
 
    TYPE_MENTION = 'mention' # not used
 
    TYPE_REGISTRATION = 'registration'
 
    TYPE_PULL_REQUEST = 'pull_request'
 
    TYPE_PULL_REQUEST_COMMENT = 'pull_request_comment'
 

	
 
    def create(self, created_by, subject, body, recipients=None,
 
               type_=TYPE_MESSAGE, with_email=True,
 
               email_kwargs=None, repo_name=None):
 
        """
 

	
 
        Creates notification of given type
 

	
 
        :param created_by: int, str or User instance. User who created this
 
            notification
 
        :param subject:
 
        :param body:
 
        :param recipients: list of int, str or User objects, when None
 
            is given send to all admins
 
        :param type_: type of notification
 
        :param with_email: send email with this notification
 
        :param email_kwargs: additional dict to pass as args to email template
 
        """
 
        from kallithea.lib.celerylib import tasks
 
        email_kwargs = email_kwargs or {}
 
        if recipients and not getattr(recipients, '__iter__', False):
 
            raise Exception('recipients must be a list or iterable')
 

	
 
        created_by_obj = User.guess_instance(created_by)
 

	
 
        recipients_objs = set()
 
        if recipients:
 
            for u in recipients:
 
                obj = User.guess_instance(u)
 
                if obj is not None:
 
                    recipients_objs.add(obj)
 
                else:
 
                    # TODO: inform user that requested operation couldn't be completed
 
                    log.error('cannot email unknown user %r', u)
 
            log.debug('sending notifications %s to %s',
 
                type_, recipients_objs
 
            )
 
        elif recipients is None:
 
            # empty recipients means to all admins
 
            recipients_objs = User.query().filter(User.admin == True).all()
 
            log.debug('sending notifications %s to admins: %s',
 
                type_, recipients_objs
 
            )
 
        #else: silently skip notification mails?
 

	
 
        if not with_email:
 
            return
 

	
kallithea/model/pull_request.py
 
@@ -220,97 +220,97 @@ class CreatePullRequestAction(object):
 
        # hack: ancestor_rev is not an other_rev but we want to show the
 
        # requested destination and have the exact ancestor
 
        other_ref = '%s:%s:%s' % (other_ref_type, other_ref_name, ancestor_rev)
 

	
 
        if not title:
 
            if org_repo == other_repo:
 
                title = '%s to %s' % (org_display, other_display)
 
            else:
 
                title = '%s#%s to %s#%s' % (org_repo.repo_name, org_display,
 
                                            other_repo.repo_name, other_display)
 
        description = description or _('No description')
 

	
 
        self.org_repo = org_repo
 
        self.other_repo = other_repo
 
        self.org_ref = org_ref
 
        self.org_rev = org_rev
 
        self.other_ref = other_ref
 
        self.title = title
 
        self.description = description
 
        self.owner = owner
 
        self.reviewers = reviewers
 

	
 
        if not CreatePullRequestAction.is_user_authorized(self.org_repo, self.other_repo):
 
            raise self.Unauthorized(_('You are not authorized to create the pull request'))
 

	
 
    def execute(self):
 
        created_by = User.get(request.authuser.user_id)
 

	
 
        pr = PullRequest()
 
        pr.org_repo = self.org_repo
 
        pr.org_ref = self.org_ref
 
        pr.other_repo = self.other_repo
 
        pr.other_ref = self.other_ref
 
        pr.revisions = self.revisions
 
        pr.title = self.title
 
        pr.description = self.description
 
        pr.owner = self.owner
 
        Session().add(pr)
 
        Session().flush() # make database assign pull_request_id
 

	
 
        if self.org_repo.scm_instance.alias == 'git':
 
            # create a ref under refs/pull/ so that commits don't get garbage-collected
 
            self.org_repo.scm_instance._repo[b"refs/pull/%d/head" % pr.pull_request_id] = ascii_bytes(self.org_rev)
 

	
 
        # reset state to under-review
 
        from kallithea.model.changeset_status import ChangesetStatusModel
 
        from kallithea.model.comment import ChangesetCommentsModel
 
        comment = ChangesetCommentsModel().create(
 
            text=u'',
 
            text='',
 
            repo=self.org_repo,
 
            author=created_by,
 
            pull_request=pr,
 
            send_email=False,
 
            status_change=ChangesetStatus.STATUS_UNDER_REVIEW,
 
        )
 
        ChangesetStatusModel().set_status(
 
            self.org_repo,
 
            ChangesetStatus.STATUS_UNDER_REVIEW,
 
            created_by,
 
            comment,
 
            pull_request=pr,
 
        )
 

	
 
        mention_recipients = extract_mentioned_users(self.description)
 
        PullRequestModel().add_reviewers(created_by, pr, self.reviewers, mention_recipients)
 

	
 
        return pr
 

	
 

	
 
class CreatePullRequestIterationAction(object):
 
    @staticmethod
 
    def is_user_authorized(old_pull_request):
 
        """Performs authorization check with only the minimum amount of
 
        information needed for such a check, rather than a full command
 
        object.
 
        """
 
        if h.HasPermissionAny('hg.admin')():
 
            return True
 

	
 
        # Authorized to edit the old PR?
 
        if request.authuser.user_id != old_pull_request.owner_id:
 
            return False
 

	
 
        # Authorized to create a new PR?
 
        if not CreatePullRequestAction.is_user_authorized(old_pull_request.org_repo, old_pull_request.other_repo):
 
            return False
 

	
 
        return True
 

	
 
    def __init__(self, old_pull_request, new_org_rev, new_other_rev, title, description, owner, reviewers):
 
        self.old_pull_request = old_pull_request
 

	
 
        org_repo = old_pull_request.org_repo
 
        org_ref_type, org_ref_name, org_rev = old_pull_request.org_ref.split(':')
 

	
 
        other_repo = old_pull_request.other_repo
 
        other_ref_type, other_ref_name, other_rev = old_pull_request.other_ref.split(':') # other_rev is ancestor
kallithea/model/repo.py
 
@@ -227,97 +227,97 @@ class RepoModel(object):
 

	
 
        defaults = repo_info.get_dict()
 
        defaults['repo_name'] = repo_info.just_name
 
        defaults['repo_group'] = repo_info.group_id
 

	
 
        for strip, k in [(0, 'repo_type'), (1, 'repo_enable_downloads'),
 
                         (1, 'repo_description'),
 
                         (1, 'repo_landing_rev'), (0, 'clone_uri'),
 
                         (1, 'repo_private'), (1, 'repo_enable_statistics')]:
 
            attr = k
 
            if strip:
 
                attr = remove_prefix(k, 'repo_')
 

	
 
            val = defaults[attr]
 
            if k == 'repo_landing_rev':
 
                val = ':'.join(defaults[attr])
 
            defaults[k] = val
 
            if k == 'clone_uri':
 
                defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
 

	
 
        # fill owner
 
        if repo_info.owner:
 
            defaults.update({'owner': repo_info.owner.username})
 
        else:
 
            replacement_user = User.query().filter(User.admin ==
 
                                                   True).first().username
 
            defaults.update({'owner': replacement_user})
 

	
 
        # fill repository users
 
        for p in repo_info.repo_to_perm:
 
            defaults.update({'u_perm_%s' % p.user.username:
 
                                 p.permission.permission_name})
 

	
 
        # fill repository groups
 
        for p in repo_info.users_group_to_perm:
 
            defaults.update({'g_perm_%s' % p.users_group.users_group_name:
 
                                 p.permission.permission_name})
 

	
 
        return defaults
 

	
 
    def update(self, repo, **kwargs):
 
        try:
 
            cur_repo = Repository.guess_instance(repo)
 
            org_repo_name = cur_repo.repo_name
 
            if 'owner' in kwargs:
 
                cur_repo.owner = User.get_by_username(kwargs['owner'])
 

	
 
            if 'repo_group' in kwargs:
 
                assert kwargs['repo_group'] != u'-1', kwargs # RepoForm should have converted to None
 
                assert kwargs['repo_group'] != '-1', kwargs # RepoForm should have converted to None
 
                cur_repo.group = RepoGroup.get(kwargs['repo_group'])
 
                cur_repo.repo_name = cur_repo.get_new_name(cur_repo.just_name)
 
            log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
 
            for k in ['repo_enable_downloads',
 
                      'repo_description',
 
                      'repo_landing_rev',
 
                      'repo_private',
 
                      'repo_enable_statistics',
 
                      ]:
 
                if k in kwargs:
 
                    setattr(cur_repo, remove_prefix(k, 'repo_'), kwargs[k])
 
            clone_uri = kwargs.get('clone_uri')
 
            if clone_uri is not None and clone_uri != cur_repo.clone_uri_hidden:
 
                # clone_uri is modified - if given a value, check it is valid
 
                if clone_uri != '':
 
                    # will raise exception on error
 
                    is_valid_repo_uri(cur_repo.repo_type, clone_uri, make_ui())
 
                cur_repo.clone_uri = clone_uri
 

	
 
            if 'repo_name' in kwargs:
 
                repo_name = kwargs['repo_name']
 
                if kallithea.lib.utils2.repo_name_slug(repo_name) != repo_name:
 
                    raise Exception('invalid repo name %s' % repo_name)
 
                cur_repo.repo_name = cur_repo.get_new_name(repo_name)
 

	
 
            # if private flag is set, reset default permission to NONE
 
            if kwargs.get('repo_private'):
 
                EMPTY_PERM = 'repository.none'
 
                RepoModel().grant_user_permission(
 
                    repo=cur_repo, user='default', perm=EMPTY_PERM
 
                )
 
                # handle extra fields
 
            for field in [k for k in kwargs if k.startswith(RepositoryField.PREFIX)]:
 
                k = RepositoryField.un_prefix_key(field)
 
                ex_field = RepositoryField.get_by_key_name(key=k, repo=cur_repo)
 
                if ex_field:
 
                    ex_field.field_value = kwargs[field]
 

	
 
            if org_repo_name != cur_repo.repo_name:
 
                # rename repository
 
                self._rename_filesystem_repo(old=org_repo_name, new=cur_repo.repo_name)
 

	
 
            return cur_repo
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def _create_repo(self, repo_name, repo_type, description, owner,
kallithea/model/repo_group.py
 
@@ -243,97 +243,97 @@ class RepoGroupModel(object):
 
                    continue
 
            elif recursive == 'groups':
 
                # skip repos
 
                if isinstance(obj, Repository):
 
                    continue
 
            else:  # recursive == 'none': # DEFAULT don't apply to iterated objects
 
                obj = repo_group
 
                # also we do a break at the end of this loop.
 

	
 
            # update permissions
 
            for member, perm, member_type in perms_updates:
 
                ## set for user
 
                if member_type == 'user':
 
                    # this also updates the current one if found
 
                    _set_perm_user(obj, user=member, perm=perm)
 
                ## set for user group
 
                else:
 
                    # check if we have permissions to alter this usergroup's access
 
                    if not check_perms or HasUserGroupPermissionLevel('read')(member):
 
                        _set_perm_group(obj, users_group=member, perm=perm)
 
            # set new permissions
 
            for member, perm, member_type in perms_new:
 
                if member_type == 'user':
 
                    _set_perm_user(obj, user=member, perm=perm)
 
                else:
 
                    # check if we have permissions to alter this usergroup's access
 
                    if not check_perms or HasUserGroupPermissionLevel('read')(member):
 
                        _set_perm_group(obj, users_group=member, perm=perm)
 
            updates.append(obj)
 
            # if it's not a recursive call for all, repos or groups
 
            # break the loop and don't proceed with other changes
 
            if recursive not in ['all', 'repos', 'groups']:
 
                break
 

	
 
        return updates
 

	
 
    def update(self, repo_group, repo_group_args):
 
        try:
 
            repo_group = RepoGroup.guess_instance(repo_group)
 
            old_path = repo_group.full_path
 

	
 
            # change properties
 
            if 'group_description' in repo_group_args:
 
                repo_group.group_description = repo_group_args['group_description']
 
            if 'parent_group_id' in repo_group_args:
 
                repo_group.parent_group_id = repo_group_args['parent_group_id']
 

	
 
            if 'parent_group_id' in repo_group_args:
 
                assert repo_group_args['parent_group_id'] != u'-1', repo_group_args  # RepoGroupForm should have converted to None
 
                assert repo_group_args['parent_group_id'] != '-1', repo_group_args  # RepoGroupForm should have converted to None
 
                repo_group.parent_group = RepoGroup.get(repo_group_args['parent_group_id'])
 
            if 'group_name' in repo_group_args:
 
                group_name = repo_group_args['group_name']
 
                if kallithea.lib.utils2.repo_name_slug(group_name) != group_name:
 
                    raise Exception('invalid repo group name %s' % group_name)
 
                repo_group.group_name = repo_group.get_new_name(group_name)
 
            new_path = repo_group.full_path
 
            Session().add(repo_group)
 

	
 
            # iterate over all members of this group and do fixes
 
            # if obj is a RepoGroup also fix the name of the group according
 
            # to the parent
 
            # if obj is a Repo fix its name
 
            # this can be a potentially heavy operation
 
            for obj in repo_group.recursive_groups_and_repos():
 
                # set the value from its parent
 
                if isinstance(obj, RepoGroup):
 
                    new_name = obj.get_new_name(obj.name)
 
                    log.debug('Fixing group %s to new name %s'
 
                                % (obj.group_name, new_name))
 
                    obj.group_name = new_name
 
                elif isinstance(obj, Repository):
 
                    # we need to get all repositories from this new group and
 
                    # rename them accordingly to new group path
 
                    new_name = obj.get_new_name(obj.just_name)
 
                    log.debug('Fixing repo %s to new name %s'
 
                                % (obj.repo_name, new_name))
 
                    obj.repo_name = new_name
 

	
 
            self._rename_group(old_path, new_path)
 

	
 
            return repo_group
 
        except Exception:
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def delete(self, repo_group, force_delete=False):
 
        repo_group = RepoGroup.guess_instance(repo_group)
 
        try:
 
            Session().delete(repo_group)
 
            self._delete_group(repo_group, force_delete)
 
        except Exception:
 
            log.error('Error removing repo_group %s', repo_group)
 
            raise
 

	
 
    def add_permission(self, repo_group, obj, obj_type, perm, recursive):
 
        from kallithea.model.repo import RepoModel
 
        repo_group = RepoGroup.guess_instance(repo_group)
kallithea/model/scm.py
Show inline comments
 
@@ -614,108 +614,108 @@ class ScmModel(object):
 
            parent_cs = EmptyChangeset(alias=scm_instance.alias)
 

	
 
        if isinstance(parent_cs, EmptyChangeset):
 
            # EmptyChangeset means we're editing an empty repository
 
            parents = None
 
        else:
 
            parents = [parent_cs]
 
        # remove multiple nodes
 
        for path, content in processed_nodes:
 
            imc.remove(FileNode(path, content=content))
 

	
 
        tip = imc.commit(message=message,
 
                         author=author,
 
                         parents=parents,
 
                         branch=parent_cs.branch)
 

	
 
        if trigger_push_hook:
 
            self._handle_push(scm_instance,
 
                              username=user.username,
 
                              ip_addr=ip_addr,
 
                              action='push_local',
 
                              repo_name=repo.repo_name,
 
                              revisions=[tip.raw_id])
 
        else:
 
            self.mark_for_invalidation(repo.repo_name)
 
        return tip
 

	
 
    def get_unread_journal(self):
 
        return UserLog.query().count()
 

	
 
    def get_repo_landing_revs(self, repo=None):
 
        """
 
        Generates select options with tags, branches and bookmarks (bookmarks are hg only)
 
        grouped by type
 

	
 
        :param repo:
 
        """
 

	
 
        hist_l = []
 
        choices = []
 
        repo = self.__get_repo(repo)
 
        hist_l.append(('rev:tip', _('latest tip')))
 
        choices.append('rev:tip')
 
        if repo is None:
 
            return choices, hist_l
 

	
 
        repo = repo.scm_instance
 

	
 
        branches_group = ([(u'branch:%s' % k, k) for k, v in
 
        branches_group = ([('branch:%s' % k, k) for k, v in
 
                           repo.branches.items()], _("Branches"))
 
        hist_l.append(branches_group)
 
        choices.extend([x[0] for x in branches_group[0]])
 

	
 
        if repo.alias == 'hg':
 
            bookmarks_group = ([(u'book:%s' % k, k) for k, v in
 
            bookmarks_group = ([('book:%s' % k, k) for k, v in
 
                                repo.bookmarks.items()], _("Bookmarks"))
 
            hist_l.append(bookmarks_group)
 
            choices.extend([x[0] for x in bookmarks_group[0]])
 

	
 
        tags_group = ([(u'tag:%s' % k, k) for k, v in
 
        tags_group = ([('tag:%s' % k, k) for k, v in
 
                       repo.tags.items()], _("Tags"))
 
        hist_l.append(tags_group)
 
        choices.extend([x[0] for x in tags_group[0]])
 

	
 
        return choices, hist_l
 

	
 
    def _get_git_hook_interpreter(self):
 
        """Return a suitable interpreter for Git hooks.
 

	
 
        Return a suitable string to be written in the POSIX #! shebang line for
 
        Git hook scripts so they invoke Kallithea code with the right Python
 
        interpreter and in the right environment.
 
        """
 
        # Note: sys.executable might not point at a usable Python interpreter. For
 
        # example, when using uwsgi, it will point at the uwsgi program itself.
 
        # FIXME This may not work on Windows and may need a shell wrapper script.
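        # Illustratively, this usually resolves to the running interpreter (e.g.
        # something like '/usr/bin/python3'), unless a 'git_hook_interpreter' path
        # has been configured explicitly.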
 
        return (kallithea.CONFIG.get('git_hook_interpreter')
 
                or sys.executable
 
                or '/usr/bin/env python3')
 

	
 
    def install_git_hooks(self, repo, force_create=False):
 
        """
 
        Creates a kallithea hook inside a git repository
 

	
 
        :param repo: Instance of VCS repo
 
        :param force_create: Create even if same name hook exists
 
        """
 

	
 
        loc = os.path.join(repo.path, 'hooks')
 
        if not repo.bare:
 
            loc = os.path.join(repo.path, '.git', 'hooks')
 
        if not os.path.isdir(loc):
 
            os.makedirs(loc)
 

	
 
        tmpl_post = b"#!%s\n" % safe_bytes(self._get_git_hook_interpreter())
 
        tmpl_post += pkg_resources.resource_string(
 
            'kallithea', os.path.join('config', 'post_receive_tmpl.py')
 
        )
 
        tmpl_pre = b"#!%s\n" % safe_bytes(self._get_git_hook_interpreter())
 
        tmpl_pre += pkg_resources.resource_string(
 
            'kallithea', os.path.join('config', 'pre_receive_tmpl.py')
 
        )
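        # Each template is now a complete hook script: a '#!<interpreter>' line
        # built from the interpreter chosen above, followed by the bundled
        # pre-/post-receive template code.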
 

	
 
        for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
 
            _hook_file = os.path.join(loc, '%s-receive' % h_type)
 
            has_hook = False
 
            log.debug('Installing git hook in repo %s', repo)
 
            if os.path.exists(_hook_file):
kallithea/model/user.py
Show inline comments
 
@@ -49,186 +49,186 @@ log = logging.getLogger(__name__)
 
class UserModel(object):
 
    password_reset_token_lifetime = 86400 # 24 hours
 

	
 
    def get(self, user_id, cache=False):
 
        user = User.query()
 
        if cache:
 
            user = user.options(FromCache("sql_cache_short",
 
                                          "get_user_%s" % user_id))
 
        return user.get(user_id)
 

	
 
    def get_user(self, user):
 
        return User.guess_instance(user)
 

	
 
    def create(self, form_data, cur_user=None):
 
        if not cur_user:
 
            cur_user = getattr(get_current_authuser(), 'username', None)
 

	
 
        from kallithea.lib.hooks import log_create_user, \
 
            check_allowed_create_user
 
        _fd = form_data
 
        user_data = {
 
            'username': _fd['username'],
 
            'password': _fd['password'],
 
            'email': _fd['email'],
 
            'firstname': _fd['firstname'],
 
            'lastname': _fd['lastname'],
 
            'active': _fd['active'],
 
            'admin': False
 
        }
 
        # raises UserCreationError if it's not allowed
 
        check_allowed_create_user(user_data, cur_user)
 
        from kallithea.lib.auth import get_crypt_password
 

	
 
        new_user = User()
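        # copy the form fields onto the new user, hashing the password and mapping
        # the form's 'firstname' onto the legacy 'name' column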
 
        for k, v in form_data.items():
 
            if k == 'password':
 
                v = get_crypt_password(v)
 
            if k == 'firstname':
 
                k = 'name'
 
            setattr(new_user, k, v)
 

	
 
        new_user.api_key = generate_api_key()
 
        Session().add(new_user)
 
        Session().flush() # make database assign new_user.user_id
 

	
 
        log_create_user(new_user.get_dict(), cur_user)
 
        return new_user
 

	
 
    def create_or_update(self, username, password, email, firstname=u'',
 
                         lastname=u'', active=True, admin=False,
 
    def create_or_update(self, username, password, email, firstname='',
 
                         lastname='', active=True, admin=False,
 
                         extern_type=None, extern_name=None, cur_user=None):
 
        """
 
        Creates a new instance if not found, or updates current one
 

	
 
        :param username:
 
        :param password:
 
        :param email:
 
        :param active:
 
        :param firstname:
 
        :param lastname:
 
        :param active:
 
        :param admin:
 
        :param extern_name:
 
        :param extern_type:
 
        :param cur_user:
 
        """
 
        if not cur_user:
 
            cur_user = getattr(get_current_authuser(), 'username', None)
 

	
 
        from kallithea.lib.auth import get_crypt_password, check_password
 
        from kallithea.lib.hooks import log_create_user, \
 
            check_allowed_create_user
 
        user_data = {
 
            'username': username, 'password': password,
 
            'email': email, 'firstname': firstname, 'lastname': lastname,
 
            'active': active, 'admin': admin
 
        }
 
        # raises UserCreationError if it's not allowed
 
        check_allowed_create_user(user_data, cur_user)
 

	
 
        log.debug('Checking for %s account in Kallithea database', username)
 
        user = User.get_by_username(username, case_insensitive=True)
 
        if user is None:
 
            log.debug('creating new user %s', username)
 
            new_user = User()
 
            edit = False
 
        else:
 
            log.debug('updating user %s', username)
 
            new_user = user
 
            edit = True
 

	
 
        try:
 
            new_user.username = username
 
            new_user.admin = admin
 
            new_user.email = email
 
            new_user.active = active
 
            new_user.extern_name = extern_name
 
            new_user.extern_type = extern_type
 
            new_user.name = firstname
 
            new_user.lastname = lastname
 

	
 
            if not edit:
 
                new_user.api_key = generate_api_key()
 

	
 
            # set password only if creating a user or the password is changed
 
            password_change = new_user.password and \
 
                not check_password(password, new_user.password)
 
            if not edit or password_change:
 
                reason = 'new password' if edit else 'new user'
 
                log.debug('Updating password reason=>%s', reason)
 
                new_user.password = get_crypt_password(password) \
 
                    if password else ''
 

	
 
            if user is None:
 
                Session().add(new_user)
 
                Session().flush() # make database assign new_user.user_id
 

	
 
            if not edit:
 
                log_create_user(new_user.get_dict(), cur_user)
 

	
 
            return new_user
 
        except (DatabaseError,):
 
            log.error(traceback.format_exc())
 
            raise
 

	
 
    def create_registration(self, form_data):
 
        from kallithea.model.notification import NotificationModel
 
        import kallithea.lib.helpers as h
 

	
 
        form_data['admin'] = False
 
        form_data['extern_type'] = User.DEFAULT_AUTH_TYPE
 
        form_data['extern_name'] = ''
 
        new_user = self.create(form_data)
 

	
 
        # notification to admins
 
        subject = _('New user registration')
 
        body = (
 
            u'New user registration\n'
 
            'New user registration\n'
 
            '---------------------\n'
 
            '- Username: {user.username}\n'
 
            '- Full Name: {user.full_name}\n'
 
            '- Email: {user.email}\n'
 
            ).format(user=new_user)
 
        edit_url = h.canonical_url('edit_user', id=new_user.user_id)
 
        email_kwargs = {
 
            'registered_user_url': edit_url,
 
            'new_username': new_user.username,
 
            'new_email': new_user.email,
 
            'new_full_name': new_user.full_name}
 
        NotificationModel().create(created_by=new_user, subject=subject,
 
                                   body=body, recipients=None,
 
                                   type_=NotificationModel.TYPE_REGISTRATION,
 
                                   email_kwargs=email_kwargs)
 

	
 
    def update(self, user_id, form_data, skip_attrs=None):
 
        from kallithea.lib.auth import get_crypt_password
 
        skip_attrs = skip_attrs or []
 
        user = self.get(user_id, cache=False)
 
        if user.is_default_user:
 
            raise DefaultUserException(
 
                            _("You can't edit this user since it's "
 
                              "crucial for entire application"))
 

	
 
        for k, v in form_data.items():
 
            if k in skip_attrs:
 
                continue
 
            if k == 'new_password' and v:
 
                user.password = get_crypt_password(v)
 
            else:
 
                # old legacy thing orm models store firstname as name,
 
                # need proper refactor to username
 
                if k == 'firstname':
 
                    k = 'name'
 
                setattr(user, k, v)
 

	
 
    def update_user(self, user, **kwargs):
 
        from kallithea.lib.auth import get_crypt_password
 

	
 
        user = User.guess_instance(user)
 
        if user.is_default_user:
 
            raise DefaultUserException(
 
                _("You can't edit this user since it's"
 
                  " crucial for entire application")
 
            )
 

	
 
        for k, v in kwargs.items():
 
@@ -263,98 +263,98 @@ class UserModel(object):
 
            usergroups = [x.users_group_name for x in user.user_groups]
 
            raise UserOwnsReposException(
 
                _('User "%s" still owns %s user groups and cannot be '
 
                  'removed. Switch owners or remove those user groups: %s')
 
                % (user.username, len(usergroups), ', '.join(usergroups)))
 
        Session().delete(user)
 

	
 
        from kallithea.lib.hooks import log_delete_user
 
        log_delete_user(user.get_dict(), cur_user)
 

	
 
    def can_change_password(self, user):
 
        from kallithea.lib import auth_modules
 
        managed_fields = auth_modules.get_managed_fields(user)
 
        return 'password' not in managed_fields
 

	
 
    def get_reset_password_token(self, user, timestamp, session_id):
 
        """
 
        The token is a 40-digit hexstring, calculated as a HMAC-SHA1.
 

	
 
        In a traditional HMAC scenario, an attacker is unable to know or
 
        influence the secret key, but can know or influence the message
 
        and token. This scenario is slightly different (in particular
 
        since the message sender is also the message recipient), but
 
        sufficiently similar to use an HMAC. Benefits compared to a plain
 
        SHA1 hash include resistance against a length extension attack.
 

	
 
        The HMAC key consists of the following values (known only to the
 
        server and authorized users):
 

	
 
        * per-application secret (the `app_instance_uuid` setting), without
 
          which an attacker cannot counterfeit tokens
 
        * hashed user password, invalidating the token upon password change
 

	
 
        The HMAC message consists of the following values (potentially known
 
        to an attacker):
 

	
 
        * session ID (the anti-CSRF token), requiring an attacker to have
 
          access to the browser session in which the token was created
 
        * numeric user ID, limiting the token to a specific user (yet allowing
 
          users to be renamed)
 
        * user email address
 
        * time of token issue (a Unix timestamp, to enable token expiration)
 

	
 
        The key and message values are separated by NUL characters, which are
 
        guaranteed not to occur in any of the values.
 
        """
 
        app_secret = config.get('app_instance_uuid')
 
        return hmac.HMAC(
 
            key=u'\0'.join([app_secret, user.password]).encode('utf-8'),
 
            msg=u'\0'.join([session_id, str(user.user_id), user.email, str(timestamp)]).encode('utf-8'),
 
            key='\0'.join([app_secret, user.password]).encode('utf-8'),
 
            msg='\0'.join([session_id, str(user.user_id), user.email, str(timestamp)]).encode('utf-8'),
 
            digestmod=hashlib.sha1,
 
        ).hexdigest()
 

	
 
    def send_reset_password_email(self, data):
 
        """
 
        Sends email with a password reset token and link to the password
 
        reset confirmation page with all information (including the token)
 
        pre-filled. Also returns the URL of that page, but without the token,
 
        allowing users to copy-paste or manually enter the token from the
 
        email.
 
        """
 
        from kallithea.lib.celerylib import tasks
 
        from kallithea.model.notification import EmailNotificationModel
 
        import kallithea.lib.helpers as h
 

	
 
        user_email = data['email']
 
        user = User.get_by_email(user_email)
 
        timestamp = int(time.time())
 
        if user is not None:
 
            if self.can_change_password(user):
 
                log.debug('password reset user %s found', user)
 
                token = self.get_reset_password_token(user,
 
                                                      timestamp,
 
                                                      h.session_csrf_secret_token())
 
                # URL must be fully qualified; but since the token is locked to
 
                # the current browser session, we must provide a URL with the
 
                # current scheme and hostname, rather than the canonical_url.
 
                link = h.url('reset_password_confirmation', qualified=True,
 
                             email=user_email,
 
                             timestamp=timestamp,
 
                             token=token)
 
            else:
 
                log.debug('password reset user %s found but was managed', user)
 
                token = link = None
 
            reg_type = EmailNotificationModel.TYPE_PASSWORD_RESET
 
            body = EmailNotificationModel().get_email_tmpl(
 
                reg_type, 'txt',
 
                user=user.short_contact,
 
                reset_token=token,
 
                reset_url=link)
 
            html_body = EmailNotificationModel().get_email_tmpl(
 
                reg_type, 'html',
 
                user=user.short_contact,
 
                reset_token=token,
 
                reset_url=link)
 
            log.debug('sending email')
 
            tasks.send_email([user_email], _("Password reset link"), body, html_body)
 
            log.info('send new password mail to %s', user_email)
kallithea/model/user_group.py
Show inline comments
 
@@ -322,61 +322,61 @@ class UserGroupModel(object):
 
        if obj is None:
 
            # create new !
 
            obj = UserGroupUserGroupToPerm()
 
            Session().add(obj)
 
        obj.user_group = user_group
 
        obj.target_user_group = target_user_group
 
        obj.permission = permission
 
        log.debug('Granted perm %s to %s on %s', perm, target_user_group, user_group)
 
        return obj
 

	
 
    def revoke_user_group_permission(self, target_user_group, user_group):
 
        """
 
        Revoke user group permission for given target_user_group
 

	
 
        :param target_user_group:
 
        :param user_group:
 
        """
 
        target_user_group = UserGroup.guess_instance(target_user_group)
 
        user_group = UserGroup.guess_instance(user_group)
 

	
 
        obj = UserGroupUserGroupToPerm.query() \
 
            .filter(UserGroupUserGroupToPerm.target_user_group == target_user_group) \
 
            .filter(UserGroupUserGroupToPerm.user_group == user_group) \
 
            .scalar()
 
        if obj is not None:
 
            Session().delete(obj)
 
            log.debug('Revoked perm on %s on %s', target_user_group, user_group)
 

	
 
    def enforce_groups(self, user, groups, extern_type=None):
 
        user = User.guess_instance(user)
 
        log.debug('Enforcing groups %s on user %s', user, groups)
 
        current_groups = user.group_member
 
        # find the externally created groups
 
        externals = [x.users_group for x in current_groups
 
                     if 'extern_type' in x.users_group.group_data]
 

	
 
        # calculate which groups the user should be removed from:
 
        # externals that are not in groups
 
        for gr in externals:
 
            if gr.users_group_name not in groups:
 
                log.debug('Removing user %s from user group %s', user, gr)
 
                self.remove_user_from_group(gr, user)
 

	
 
        # now calculate which groups the user should be in (== the groups parameter)
 
        owner = User.get_first_admin().username
 
        for gr in set(groups):
 
            existing_group = UserGroup.get_by_group_name(gr)
 
            if not existing_group:
 
                desc = u'Automatically created from plugin:%s' % extern_type
 
                desc = 'Automatically created from plugin:%s' % extern_type
 
                # we use first admin account to set the owner of the group
 
                existing_group = UserGroupModel().create(gr, desc, owner,
 
                                        group_data={'extern_type': extern_type})
 

	
 
            # we can only add users to special groups created via plugins
 
            managed = 'extern_type' in existing_group.group_data
 
            if managed:
 
                log.debug('Adding user %s to user group %s', user, gr)
 
                UserGroupModel().add_user_to_group(existing_group, user)
 
            else:
 
                log.debug('Skipping addition to group %s since it is '
 
                          'not managed by auth plugins' % gr)
kallithea/model/validators.py
Show inline comments
 
@@ -231,97 +231,97 @@ def ValidPassword():
 
        messages = {
 
            'invalid_password':
 
                _('Invalid characters (non-ascii) in password')
 
        }
 

	
 
        def _validate_python(self, value, state):
 
            try:
 
                (value or '').encode('ascii')
 
            except UnicodeError:
 
                msg = self.message('invalid_password', state)
 
                raise formencode.Invalid(msg, value, state,)
 
    return _validator
 

	
 

	
 
def ValidOldPassword(username):
 
    class _validator(formencode.validators.FancyValidator):
 
        messages = {
 
            'invalid_password': _('Invalid old password')
 
        }
 

	
 
        def _validate_python(self, value, state):
 
            from kallithea.lib import auth_modules
 
            if auth_modules.authenticate(username, value, '') is None:
 
                msg = self.message('invalid_password', state)
 
                raise formencode.Invalid(msg, value, state,
 
                    error_dict=dict(current_password=msg)
 
                )
 
    return _validator
 

	
 

	
 
def ValidPasswordsMatch(password_field, password_confirmation_field):
 
    class _validator(formencode.validators.FancyValidator):
 
        messages = {
 
            'password_mismatch': _('Passwords do not match'),
 
        }
 

	
 
        def _validate_python(self, value, state):
 
            if value.get(password_field) != value[password_confirmation_field]:
 
                msg = self.message('password_mismatch', state)
 
                raise formencode.Invalid(msg, value, state,
 
                     error_dict={password_field: msg, password_confirmation_field: msg}
 
                )
 
    return _validator
 

	
 

	
 
def ValidAuth():
 
    class _validator(formencode.validators.FancyValidator):
 
        messages = {
 
            'invalid_auth': _(u'Invalid username or password'),
 
            'invalid_auth': _('Invalid username or password'),
 
        }
 

	
 
        def _validate_python(self, value, state):
 
            from kallithea.lib import auth_modules
 

	
 
            password = value['password']
 
            username = value['username']
 

	
 
            # authenticate returns an unused dict, but has called
 
            # plugin._authenticate, which has create_or_update'ed the user in the db
 
            if auth_modules.authenticate(username, password) is None:
 
                user = User.get_by_username_or_email(username)
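                # a disabled account and a bad password are logged differently below,
                # but both raise the same generic 'invalid_auth' message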
 
                if user and not user.active:
 
                    log.warning('user %s is disabled', username)
 
                    msg = self.message('invalid_auth', state)
 
                    raise formencode.Invalid(msg, value, state,
 
                        error_dict=dict(username=' ', password=msg)
 
                    )
 
                else:
 
                    log.warning('user %s failed to authenticate', username)
 
                    msg = self.message('invalid_auth', state)
 
                    raise formencode.Invalid(msg, value, state,
 
                        error_dict=dict(username=' ', password=msg)
 
                    )
 
    return _validator
 

	
 

	
 
def ValidRepoName(edit=False, old_data=None):
 
    old_data = old_data or {}
 

	
 
    class _validator(formencode.validators.FancyValidator):
 
        messages = {
 
            'invalid_repo_name':
 
                _('Repository name %(repo)s is not allowed'),
 
            'repository_exists':
 
                _('Repository named %(repo)s already exists'),
 
            'repository_in_group_exists': _('Repository "%(repo)s" already '
 
                                            'exists in group "%(group)s"'),
 
            'same_group_exists': _('Repository group with name "%(repo)s" '
 
                                   'already exists')
 
        }
 

	
 
        def _convert_to_python(self, value, state):
 
            repo_name = repo_name_slug(value.get('repo_name', ''))
 
            repo_group = value.get('repo_group')
 
            if repo_group:
 
                gr = RepoGroup.get(repo_group)
 
                group_path = gr.full_path
kallithea/tests/api/api_base.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
"""
 
Tests for the JSON-RPC web api.
 
"""
 

	
 
import os
 
import random
 
import re
 

	
 
import mock
 
import pytest
 

	
 
from kallithea.lib import ext_json
 
from kallithea.lib.auth import AuthUser
 
from kallithea.lib.utils2 import ascii_bytes
 
from kallithea.model.changeset_status import ChangesetStatusModel
 
from kallithea.model.db import ChangesetStatus, PullRequest, RepoGroup, Repository, Setting, Ui, User
 
from kallithea.model.gist import GistModel
 
from kallithea.model.meta import Session
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.repo_group import RepoGroupModel
 
from kallithea.model.scm import ScmModel
 
from kallithea.model.user import UserModel
 
from kallithea.model.user_group import UserGroupModel
 
from kallithea.tests import base
 
from kallithea.tests.fixture import Fixture
 

	
 

	
 
API_URL = '/_admin/api'
 
TEST_USER_GROUP = u'test_user_group'
 
TEST_REPO_GROUP = u'test_repo_group'
 
TEST_USER_GROUP = 'test_user_group'
 
TEST_REPO_GROUP = 'test_repo_group'
 

	
 
fixture = Fixture()
 

	
 

	
 
def _build_data(apikey, method, **kw):
 
    """
 
    Builds API data with given random ID
 
    For convenience, the json is returned as str
 
    """
 
    random_id = random.randrange(1, 9999)
 
    return random_id, ext_json.dumps({
 
        "id": random_id,
 
        "api_key": apikey,
 
        "method": method,
 
        "args": kw
 
    })
 

	
 

	
 
jsonify = lambda obj: ext_json.loads(ext_json.dumps(obj))
 

	
 

	
 
def crash(*args, **kwargs):
 
    raise Exception('Total Crash !')
 

	
 

	
 
def api_call(test_obj, params):
 
    response = test_obj.app.post(API_URL, content_type='application/json',
 
                                 params=params)
 
    return response
 

	
 

	
 
## helpers
 
def make_user_group(name=TEST_USER_GROUP):
 
    gr = fixture.create_user_group(name, cur_user=base.TEST_USER_ADMIN_LOGIN)
 
    UserGroupModel().add_user_to_group(user_group=gr,
 
                                       user=base.TEST_USER_ADMIN_LOGIN)
 
    Session().commit()
 
    return gr
 

	
 

	
 
def make_repo_group(name=TEST_REPO_GROUP):
 
    gr = fixture.create_repo_group(name, cur_user=base.TEST_USER_ADMIN_LOGIN)
 
    Session().commit()
 
    return gr
 

	
 

	
 
class _BaseTestApi(object):
 
    REPO = None
 
    REPO_TYPE = None
 

	
 
    @classmethod
 
    def setup_class(cls):
 
        cls.usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
 
        cls.apikey = cls.usr.api_key
 
        cls.test_user = UserModel().create_or_update(
 
            username='test-api',
 
            password='test',
 
            email='test@example.com',
 
            firstname=u'first',
 
            lastname=u'last'
 
            firstname='first',
 
            lastname='last'
 
        )
 
        Session().commit()
 
        cls.TEST_USER_LOGIN = cls.test_user.username
 
        cls.apikey_regular = cls.test_user.api_key
 
        cls.default_user_username = User.get_default_user().username
 

	
 
    @classmethod
 
    def teardown_class(cls):
 
        pass
 

	
 
    def setup_method(self, method):
 
        make_user_group()
 
        make_repo_group()
 

	
 
    def teardown_method(self, method):
 
        fixture.destroy_user_group(TEST_USER_GROUP)
 
        fixture.destroy_gists()
 
        fixture.destroy_repo_group(TEST_REPO_GROUP)
 

	
 
    def _compare_ok(self, id_, expected, given):
 
        expected = jsonify({
 
            'id': id_,
 
            'error': None,
 
            'result': expected
 
        })
 
        given = ext_json.loads(given)
 
        assert expected == given, (expected, given)
 

	
 
    def _compare_error(self, id_, expected, given):
 
        expected = jsonify({
 
            'id': id_,
 
            'error': expected,
 
            'result': None
 
        })
 
        given = ext_json.loads(given)
 
        assert expected == given, (expected, given)
 

	
 
    def test_Optional_object(self):
 
        from kallithea.controllers.api.api import Optional
 

	
 
        option1 = Optional(None)
 
        assert '<Optional:%s>' % None == repr(option1)
 
        assert option1() is None
 

	
 
        assert 1 == Optional.extract(Optional(1))
 
        assert 'trololo' == Optional.extract('trololo')
 

	
 
    def test_Optional_OAttr(self):
 
@@ -236,143 +236,143 @@ class _BaseTestApi(object):
 
        usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
 
        ret = usr.get_api_data()
 
        ret['permissions'] = AuthUser(dbuser=usr).permissions
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_that_does_not_exist(self):
 
        id_, params = _build_data(self.apikey, 'get_user',
 
                                  userid='trololo')
 
        response = api_call(self, params)
 

	
 
        expected = "user `%s` does not exist" % 'trololo'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_without_giving_userid(self):
 
        id_, params = _build_data(self.apikey, 'get_user')
 
        response = api_call(self, params)
 

	
 
        usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
 
        ret = usr.get_api_data()
 
        ret['permissions'] = AuthUser(dbuser=usr).permissions
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_without_giving_userid_non_admin(self):
 
        id_, params = _build_data(self.apikey_regular, 'get_user')
 
        response = api_call(self, params)
 

	
 
        usr = User.get_by_username(self.TEST_USER_LOGIN)
 
        ret = usr.get_api_data()
 
        ret['permissions'] = AuthUser(dbuser=usr).permissions
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_with_giving_userid_non_admin(self):
 
        id_, params = _build_data(self.apikey_regular, 'get_user',
 
                                  userid=self.TEST_USER_LOGIN)
 
        response = api_call(self, params)
 

	
 
        expected = 'userid is not the same as your user'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_pull_remote(self):
 
        # Note: pulling from local repos is a mis-feature - it will bypass access control
 
        # ... but it is OK if the path has already been set in the database
 
        repo_name = u'test_pull'
 
        repo_name = 'test_pull'
 
        r = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        # hack around the fact that clone_uri can't be set to a local path
 
        # (as shown by test_api_create_repo_clone_uri_local)
 
        r.clone_uri = os.path.join(Ui.get_by_key('paths', '/').ui_value, self.REPO)
 
        Session().commit()
 

	
 
        pre_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in Repository.query().filter(Repository.repo_name == repo_name)]
 

	
 
        id_, params = _build_data(self.apikey, 'pull',
 
                                  repoid=repo_name,)
 
        response = api_call(self, params)
 

	
 
        expected = {'msg': 'Pulled from `%s`' % repo_name,
 
                    'repository': repo_name}
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
        post_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in Repository.query().filter(Repository.repo_name == repo_name)]
 

	
 
        fixture.destroy_repo(repo_name)
 

	
 
        assert pre_cached_tip != post_cached_tip
 

	
 
    def test_api_pull_fork(self):
 
        fork_name = u'fork'
 
        fork_name = 'fork'
 
        fixture.create_fork(self.REPO, fork_name)
 
        id_, params = _build_data(self.apikey, 'pull',
 
                                  repoid=fork_name,)
 
        response = api_call(self, params)
 

	
 
        expected = {'msg': 'Pulled from `%s`' % fork_name,
 
                    'repository': fork_name}
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
        fixture.destroy_repo(fork_name)
 

	
 
    def test_api_pull_error_no_remote_no_fork(self):
 
        # should fail because no clone_uri is set
 
        id_, params = _build_data(self.apikey, 'pull',
 
                                  repoid=self.REPO, )
 
        response = api_call(self, params)
 

	
 
        expected = 'Unable to pull changes from `%s`' % self.REPO
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_pull_custom_remote(self):
 
        repo_name = u'test_pull_custom_remote'
 
        repo_name = 'test_pull_custom_remote'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 

	
 
        custom_remote_path = os.path.join(Ui.get_by_key('paths', '/').ui_value, self.REPO)
 

	
 
        id_, params = _build_data(self.apikey, 'pull',
 
                                  repoid=repo_name,
 
                                  clone_uri=custom_remote_path)
 
        response = api_call(self, params)
 

	
 
        expected = {'msg': 'Pulled from `%s`' % repo_name,
 
                    'repository': repo_name}
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
        fixture.destroy_repo(repo_name)
 

	
 
    def test_api_rescan_repos(self):
 
        id_, params = _build_data(self.apikey, 'rescan_repos')
 
        response = api_call(self, params)
 

	
 
        expected = {'added': [], 'removed': []}
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(ScmModel, 'repo_scan', crash)
 
    def test_api_rescann_error(self):
 
        id_, params = _build_data(self.apikey, 'rescan_repos', )
 
        response = api_call(self, params)
 

	
 
        expected = 'Error occurred during rescan repositories action'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_invalidate_cache(self):
 
        repo = RepoModel().get_by_repo_name(self.REPO)
 
        repo.scm_instance_cached()  # seed cache
 

	
 
        id_, params = _build_data(self.apikey, 'invalidate_cache',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 

	
 
        expected = {
 
            'msg': "Cache for repository `%s` was invalidated" % (self.REPO,),
 
            'repository': self.REPO
 
        }
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(ScmModel, 'mark_for_invalidation', crash)
 
    def test_api_invalidate_cache_error(self):
 
        id_, params = _build_data(self.apikey, 'invalidate_cache',
 
                                  repoid=self.REPO)
 
@@ -444,264 +444,264 @@ class _BaseTestApi(object):
 
        response = api_call(self, params)
 

	
 
        usr = User.get_by_username(username)
 
        ret = dict(
 
            msg='created new user `%s`' % username,
 
            user=jsonify(usr.get_api_data())
 
        )
 
        try:
 
            expected = ret
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user(usr.user_id)
 

	
 
    def test_api_create_user_with_extern_name(self):
 
        username = 'test_new_api_user_passwordless'
 
        email = username + "@example.com"
 

	
 
        id_, params = _build_data(self.apikey, 'create_user',
 
                                  username=username,
 
                                  email=email, extern_name='internal')
 
        response = api_call(self, params)
 

	
 
        usr = User.get_by_username(username)
 
        ret = dict(
 
            msg='created new user `%s`' % username,
 
            user=jsonify(usr.get_api_data())
 
        )
 
        try:
 
            expected = ret
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user(usr.user_id)
 

	
 
    @mock.patch.object(UserModel, 'create_or_update', crash)
 
    def test_api_create_user_when_exception_happened(self):
 

	
 
        username = 'test_new_api_user'
 
        email = username + "@example.com"
 

	
 
        id_, params = _build_data(self.apikey, 'create_user',
 
                                  username=username,
 
                                  email=email,
 
                                  password='trololo')
 
        response = api_call(self, params)
 
        expected = 'failed to create user `%s`' % username
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_delete_user(self):
 
        usr = UserModel().create_or_update(username=u'test_user',
 
                                           password=u'qweqwe',
 
                                           email=u'u232@example.com',
 
                                           firstname=u'u1', lastname=u'u1')
 
        usr = UserModel().create_or_update(username='test_user',
 
                                           password='qweqwe',
 
                                           email='u232@example.com',
 
                                           firstname='u1', lastname='u1')
 
        Session().commit()
 
        username = usr.username
 
        email = usr.email
 
        usr_id = usr.user_id
 
        ## DELETE THIS USER NOW
 

	
 
        id_, params = _build_data(self.apikey, 'delete_user',
 
                                  userid=username, )
 
        response = api_call(self, params)
 

	
 
        ret = {'msg': 'deleted user ID:%s %s' % (usr_id, username),
 
               'user': None}
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(UserModel, 'delete', crash)
 
    def test_api_delete_user_when_exception_happened(self):
 
        usr = UserModel().create_or_update(username=u'test_user',
 
                                           password=u'qweqwe',
 
                                           email=u'u232@example.com',
 
                                           firstname=u'u1', lastname=u'u1')
 
        usr = UserModel().create_or_update(username='test_user',
 
                                           password='qweqwe',
 
                                           email='u232@example.com',
 
                                           firstname='u1', lastname='u1')
 
        Session().commit()
 
        username = usr.username
 

	
 
        id_, params = _build_data(self.apikey, 'delete_user',
 
                                  userid=username, )
 
        response = api_call(self, params)
 
        ret = 'failed to delete user ID:%s %s' % (usr.user_id,
 
                                                  usr.username)
 
        expected = ret
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @base.parametrize('name,expected', [
 
        ('firstname', 'new_username'),
 
        ('lastname', 'new_username'),
 
        ('email', 'new_username'),
 
        ('admin', True),
 
        ('admin', False),
 
        ('extern_type', 'ldap'),
 
        ('extern_type', None),
 
        ('extern_name', 'test'),
 
        ('extern_name', None),
 
        ('active', False),
 
        ('active', True),
 
        ('password', 'newpass'),
 
    ])
 
    def test_api_update_user(self, name, expected):
 
        usr = User.get_by_username(self.TEST_USER_LOGIN)
 
        kw = {name: expected,
 
              'userid': usr.user_id}
 
        id_, params = _build_data(self.apikey, 'update_user', **kw)
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'updated user ID:%s %s' % (
 
                usr.user_id, self.TEST_USER_LOGIN),
 
            'user': jsonify(User \
 
                .get_by_username(self.TEST_USER_LOGIN) \
 
                .get_api_data())
 
        }
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_update_user_no_changed_params(self):
 
        usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
 
        ret = jsonify(usr.get_api_data())
 
        id_, params = _build_data(self.apikey, 'update_user',
 
                                  userid=base.TEST_USER_ADMIN_LOGIN)
 

	
 
        response = api_call(self, params)
 
        ret = {
 
            'msg': 'updated user ID:%s %s' % (
 
                usr.user_id, base.TEST_USER_ADMIN_LOGIN),
 
            'user': ret
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_update_user_by_user_id(self):
 
        usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
 
        ret = jsonify(usr.get_api_data())
 
        id_, params = _build_data(self.apikey, 'update_user',
 
                                  userid=usr.user_id)
 

	
 
        response = api_call(self, params)
 
        ret = {
 
            'msg': 'updated user ID:%s %s' % (
 
                usr.user_id, base.TEST_USER_ADMIN_LOGIN),
 
            'user': ret
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_update_user_default_user(self):
 
        usr = User.get_default_user()
 
        id_, params = _build_data(self.apikey, 'update_user',
 
                                  userid=usr.user_id)
 

	
 
        response = api_call(self, params)
 
        expected = 'editing default user is forbidden'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(UserModel, 'update_user', crash)
 
    def test_api_update_user_when_exception_happens(self):
 
        usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
 
        ret = jsonify(usr.get_api_data())
 
        id_, params = _build_data(self.apikey, 'update_user',
 
                                  userid=usr.user_id)
 

	
 
        response = api_call(self, params)
 
        ret = 'failed to update user `%s`' % usr.user_id
 

	
 
        expected = ret
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_repo(self):
 
        new_group = u'some_new_group'
 
        new_group = 'some_new_group'
 
        make_user_group(new_group)
 
        RepoModel().grant_user_group_permission(repo=self.REPO,
 
                                                group_name=new_group,
 
                                                perm='repository.read')
 
        Session().commit()
 
        id_, params = _build_data(self.apikey, 'get_repo',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 
        assert u"tags" not in response.json[u'result']
 
        assert u'pull_requests' not in response.json[u'result']
 
        assert "tags" not in response.json['result']
 
        assert 'pull_requests' not in response.json['result']
 

	
 
        repo = RepoModel().get_by_repo_name(self.REPO)
 
        ret = repo.get_api_data()
 

	
 
        members = []
 
        followers = []
 
        for user in repo.repo_to_perm:
 
            perm = user.permission.permission_name
 
            user = user.user
 
            user_data = {'name': user.username, 'type': "user",
 
                         'permission': perm}
 
            members.append(user_data)
 

	
 
        for user_group in repo.users_group_to_perm:
 
            perm = user_group.permission.permission_name
 
            user_group = user_group.users_group
 
            user_group_data = {'name': user_group.users_group_name,
 
                               'type': "user_group", 'permission': perm}
 
            members.append(user_group_data)
 

	
 
        for user in repo.followers:
 
            followers.append(user.user.get_api_data())
 

	
 
        ret['members'] = members
 
        ret['followers'] = followers
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_user_group(new_group)
 

	
 
        id_, params = _build_data(self.apikey, 'get_repo', repoid=self.REPO,
 
                                  with_revision_names=True,
 
                                  with_pullrequests=True)
 
        response = api_call(self, params)
 
        assert u"v0.2.0" in response.json[u'result'][u'tags']
 
        assert u'pull_requests' in response.json[u'result']
 
        assert "v0.2.0" in response.json['result']['tags']
 
        assert 'pull_requests' in response.json['result']
 

	
 
    @base.parametrize('grant_perm', [
 
        ('repository.admin'),
 
        ('repository.write'),
 
        ('repository.read'),
 
    ])
 
    def test_api_get_repo_by_non_admin(self, grant_perm):
 
        RepoModel().grant_user_permission(repo=self.REPO,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm=grant_perm)
 
        Session().commit()
 
        id_, params = _build_data(self.apikey_regular, 'get_repo',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 

	
 
        repo = RepoModel().get_by_repo_name(self.REPO)
 
        ret = repo.get_api_data()
 

	
 
        members = []
 
        followers = []
 
        assert 2 == len(repo.repo_to_perm)
 
        for user in repo.repo_to_perm:
 
            perm = user.permission.permission_name
 
            user_obj = user.user
 
            user_data = {'name': user_obj.username, 'type': "user",
 
                         'permission': perm}
 
            members.append(user_data)
 

	
 
        for user_group in repo.users_group_to_perm:
 
            perm = user_group.permission.permission_name
 
            user_group_obj = user_group.users_group
 
            user_group_data = {'name': user_group_obj.users_group_name,
 
                               'type': "user_group", 'permission': perm}
 
            members.append(user_group_data)
 

	
 
        for user in repo.followers:
 
            followers.append(user.user.get_api_data())
 

	
 
        ret['members'] = members
 
        ret['followers'] = followers
 

	
 
        expected = ret
 
        try:
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            RepoModel().revoke_user_permission(self.REPO, self.TEST_USER_LOGIN)
 

	
 
    def test_api_get_repo_by_non_admin_no_permission_to_repo(self):
 
@@ -793,938 +793,938 @@ class _BaseTestApi(object):
 
                                  root_path=path, )
 
        response = api_call(self, params)
 

	
 
        expected = 'failed to get repo: `%s` nodes' % self.REPO
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_repo_nodes_bad_ret_type(self):
 
        rev = 'tip'
 
        path = '/'
 
        ret_type = 'error'
 
        id_, params = _build_data(self.apikey, 'get_repo_nodes',
 
                                  repoid=self.REPO, revision=rev,
 
                                  root_path=path,
 
                                  ret_type=ret_type)
 
        response = api_call(self, params)
 

	
 
        expected = ('ret_type must be one of %s'
 
                    % (','.join(sorted(['files', 'dirs', 'all']))))
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @base.parametrize('name,ret_type,grant_perm', [
 
        ('all', 'all', 'repository.write'),
 
        ('dirs', 'dirs', 'repository.admin'),
 
        ('files', 'files', 'repository.read'),
 
    ])
 
    def test_api_get_repo_nodes_by_regular_user(self, name, ret_type, grant_perm):
 
        RepoModel().grant_user_permission(repo=self.REPO,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm=grant_perm)
 
        Session().commit()
 

	
 
        rev = 'tip'
 
        path = '/'
 
        id_, params = _build_data(self.apikey_regular, 'get_repo_nodes',
 
                                  repoid=self.REPO, revision=rev,
 
                                  root_path=path,
 
                                  ret_type=ret_type)
 
        response = api_call(self, params)
 

	
 
        # we don't check the actual return types here since they are tested
 
        # elsewhere
 
        expected = response.json['result']
 
        try:
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            RepoModel().revoke_user_permission(self.REPO, self.TEST_USER_LOGIN)
 

	
 
    def test_api_create_repo(self):
 
        repo_name = u'api-repo'
 
        repo_name = 'api-repo'
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  owner=base.TEST_USER_ADMIN_LOGIN,
 
                                  repo_type=self.REPO_TYPE,
 
        )
 
        response = api_call(self, params)
 

	
 
        repo = RepoModel().get_by_repo_name(repo_name)
 
        assert repo is not None
 
        ret = {
 
            'msg': 'Created new repository `%s`' % repo_name,
 
            'success': True,
 
            'task': None,
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_repo(repo_name)
 

	
 
    @base.parametrize('repo_name', [
 
        u'',
 
        u'.',
 
        u'..',
 
        u':',
 
        u'/',
 
        u'<test>',
 
        '',
 
        '.',
 
        '..',
 
        ':',
 
        '/',
 
        '<test>',
 
    ])
 
    def test_api_create_repo_bad_names(self, repo_name):
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  owner=base.TEST_USER_ADMIN_LOGIN,
 
                                  repo_type=self.REPO_TYPE,
 
        )
 
        response = api_call(self, params)
 
        if repo_name == '/':
 
            expected = "repo group `` not found"
 
            self._compare_error(id_, expected, given=response.body)
 
        else:
 
            expected = "failed to create repository `%s`" % repo_name
 
            self._compare_error(id_, expected, given=response.body)
 
        fixture.destroy_repo(repo_name)
 

	
 
    def test_api_create_repo_clone_uri_local(self):
 
        # cloning from local repos was a mis-feature - it would bypass access control
 
        # TODO: introduce other test coverage of actual remote cloning
 
        clone_uri = os.path.join(base.TESTS_TMP_PATH, self.REPO)
 
        repo_name = u'api-repo'
 
        repo_name = 'api-repo'
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  owner=base.TEST_USER_ADMIN_LOGIN,
 
                                  repo_type=self.REPO_TYPE,
 
                                  clone_uri=clone_uri,
 
        )
 
        response = api_call(self, params)
 
        expected = "failed to create repository `%s`" % repo_name
 
        self._compare_error(id_, expected, given=response.body)
 
        fixture.destroy_repo(repo_name)
 

	
 
    def test_api_create_repo_and_repo_group(self):
 
        repo_group_name = u'my_gr'
 
        repo_name = u'%s/api-repo' % repo_group_name
 
        repo_group_name = 'my_gr'
 
        repo_name = '%s/api-repo' % repo_group_name
 

	
 
        # repo creation can no longer also create repo group
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  owner=base.TEST_USER_ADMIN_LOGIN,
 
                                  repo_type=self.REPO_TYPE,)
 
        response = api_call(self, params)
 
        expected = u'repo group `%s` not found' % repo_group_name
 
        expected = 'repo group `%s` not found' % repo_group_name
 
        self._compare_error(id_, expected, given=response.body)
 
        assert RepoModel().get_by_repo_name(repo_name) is None
 

	
 
        # create group before creating repo
 
        rg = fixture.create_repo_group(repo_group_name)
 
        Session().commit()
 

	
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  owner=base.TEST_USER_ADMIN_LOGIN,
 
                                  repo_type=self.REPO_TYPE,)
 
        response = api_call(self, params)
 
        expected = {
 
            'msg': 'Created new repository `%s`' % repo_name,
 
            'success': True,
 
            'task': None,
 
        }
 
        self._compare_ok(id_, expected, given=response.body)
 
        repo = RepoModel().get_by_repo_name(repo_name)
 
        assert repo is not None
 

	
 
        fixture.destroy_repo(repo_name)
 
        fixture.destroy_repo_group(repo_group_name)
 

	
 
    def test_api_create_repo_in_repo_group_without_permission(self):
 
        repo_group_basename = u'api-repo-repo'
 
        repo_group_name = u'%s/%s' % (TEST_REPO_GROUP, repo_group_basename)
 
        repo_name = u'%s/api-repo' % repo_group_name
 
        repo_group_basename = 'api-repo-repo'
 
        repo_group_name = '%s/%s' % (TEST_REPO_GROUP, repo_group_basename)
 
        repo_name = '%s/api-repo' % repo_group_name
 

	
 
        top_group = RepoGroup.get_by_group_name(TEST_REPO_GROUP)
 
        assert top_group
 
        rg = fixture.create_repo_group(repo_group_basename, parent_group_id=top_group)
 
        Session().commit()
 
        RepoGroupModel().grant_user_permission(repo_group_name,
 
                                               self.TEST_USER_LOGIN,
 
                                               'group.none')
 
        Session().commit()
 

	
 
        id_, params = _build_data(self.apikey_regular, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  repo_type=self.REPO_TYPE,
 
        )
 
        response = api_call(self, params)
 

	
 
        # Current result, while API access control still differs from the Web interface:
 
        ret = {
 
            'msg': 'Created new repository `%s`' % repo_name,
 
            'success': True,
 
            'task': None,
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_repo(repo_name)
 

	
 
        # Expected and arguably more correct result:
 
        #expected = 'failed to create repository `%s`' % repo_name
 
        #self._compare_error(id_, expected, given=response.body)
 

	
 
        fixture.destroy_repo_group(repo_group_name)
 

	
 
    def test_api_create_repo_unknown_owner(self):
 
        repo_name = u'api-repo'
 
        repo_name = 'api-repo'
 
        owner = 'i-dont-exist'
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  owner=owner,
 
                                  repo_type=self.REPO_TYPE,
 
        )
 
        response = api_call(self, params)
 
        expected = 'user `%s` does not exist' % owner
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_create_repo_dont_specify_owner(self):
 
        repo_name = u'api-repo'
 
        repo_name = 'api-repo'
 
        owner = 'i-dont-exist'
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  repo_type=self.REPO_TYPE,
 
        )
 
        response = api_call(self, params)
 

	
 
        repo = RepoModel().get_by_repo_name(repo_name)
 
        assert repo is not None
 
        ret = {
 
            'msg': 'Created new repository `%s`' % repo_name,
 
            'success': True,
 
            'task': None,
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_repo(repo_name)
 

	
 
    def test_api_create_repo_by_non_admin(self):
 
        repo_name = u'api-repo'
 
        repo_name = 'api-repo'
 
        owner = 'i-dont-exist'
 
        id_, params = _build_data(self.apikey_regular, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  repo_type=self.REPO_TYPE,
 
        )
 
        response = api_call(self, params)
 

	
 
        repo = RepoModel().get_by_repo_name(repo_name)
 
        assert repo is not None
 
        ret = {
 
            'msg': 'Created new repository `%s`' % repo_name,
 
            'success': True,
 
            'task': None,
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_repo(repo_name)
 

	
 
    def test_api_create_repo_by_non_admin_specify_owner(self):
 
        repo_name = u'api-repo'
 
        repo_name = 'api-repo'
 
        owner = 'i-dont-exist'
 
        id_, params = _build_data(self.apikey_regular, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  repo_type=self.REPO_TYPE,
 
                                  owner=owner)
 
        response = api_call(self, params)
 

	
 
        expected = 'Only Kallithea admin can specify `owner` param'
 
        self._compare_error(id_, expected, given=response.body)
 
        fixture.destroy_repo(repo_name)
 

	
 
    def test_api_create_repo_exists(self):
 
        repo_name = self.REPO
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  owner=base.TEST_USER_ADMIN_LOGIN,
 
                                  repo_type=self.REPO_TYPE,)
 
        response = api_call(self, params)
 
        expected = "repo `%s` already exist" % repo_name
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_create_repo_dot_dot(self):
 
        # it is only possible to create repositories in existing repo groups - and '..' can't be used
 
        group_name = '%s/..' % TEST_REPO_GROUP
 
        repo_name = '%s/%s' % (group_name, 'could-be-outside')
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  owner=base.TEST_USER_ADMIN_LOGIN,
 
                                  repo_type=self.REPO_TYPE,)
 
        response = api_call(self, params)
 
        expected = u'repo group `%s` not found' % group_name
 
        expected = 'repo group `%s` not found' % group_name
 
        self._compare_error(id_, expected, given=response.body)
 
        fixture.destroy_repo(repo_name)
 

	
 
    @mock.patch.object(RepoModel, 'create', crash)
 
    def test_api_create_repo_exception_occurred(self):
 
        repo_name = u'api-repo'
 
        repo_name = 'api-repo'
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  owner=base.TEST_USER_ADMIN_LOGIN,
 
                                  repo_type=self.REPO_TYPE,)
 
        response = api_call(self, params)
 
        expected = 'failed to create repository `%s`' % repo_name
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @base.parametrize('changing_attr,updates', [
 
        ('owner', {'owner': base.TEST_USER_REGULAR_LOGIN}),
 
        ('description', {'description': u'new description'}),
 
        ('description', {'description': 'new description'}),
 
        ('clone_uri', {'clone_uri': 'http://example.com/repo'}), # will fail - pulling from non-existing repo should fail
 
        ('clone_uri', {'clone_uri': '/repo'}), # will fail - pulling from local repo was a mis-feature - it would bypass access control
 
        ('clone_uri', {'clone_uri': None}),
 
        ('landing_rev', {'landing_rev': 'branch:master'}),
 
        ('enable_statistics', {'enable_statistics': True}),
 
        ('enable_downloads', {'enable_downloads': True}),
 
        ('name', {'name': u'new_repo_name'}),
 
        ('repo_group', {'group': u'test_group_for_update'}),
 
        ('name', {'name': 'new_repo_name'}),
 
        ('repo_group', {'group': 'test_group_for_update'}),
 
    ])
 
    def test_api_update_repo(self, changing_attr, updates):
 
        repo_name = u'api_update_me'
 
        repo_name = 'api_update_me'
 
        repo = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        if changing_attr == 'repo_group':
 
            fixture.create_repo_group(updates['group'])
 

	
 
        id_, params = _build_data(self.apikey, 'update_repo',
 
                                  repoid=repo_name, **updates)
 
        response = api_call(self, params)
 
        if changing_attr == 'name':
 
            repo_name = updates['name']
 
        if changing_attr == 'repo_group':
 
            repo_name = u'/'.join([updates['group'], repo_name])
 
            repo_name = '/'.join([updates['group'], repo_name])
 
        try:
 
            if changing_attr == 'clone_uri' and updates['clone_uri']:
 
                expected = u'failed to update repo `%s`' % repo_name
 
                expected = 'failed to update repo `%s`' % repo_name
 
                self._compare_error(id_, expected, given=response.body)
 
            else:
 
                expected = {
 
                    'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
 
                    'repository': repo.get_api_data()
 
                }
 
                self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 
            if changing_attr == 'repo_group':
 
                fixture.destroy_repo_group(updates['group'])
 

	
 
    @base.parametrize('changing_attr,updates', [
 
        ('owner', {'owner': base.TEST_USER_REGULAR_LOGIN}),
 
        ('description', {'description': u'new description'}),
 
        ('description', {'description': 'new description'}),
 
        ('clone_uri', {'clone_uri': 'http://example.com/repo'}), # will fail - pulling from non-existing repo should fail
 
        ('clone_uri', {'clone_uri': '/repo'}), # will fail - pulling from local repo was a mis-feature - it would bypass access control
 
        ('clone_uri', {'clone_uri': None}),
 
        ('landing_rev', {'landing_rev': 'branch:master'}),
 
        ('enable_statistics', {'enable_statistics': True}),
 
        ('enable_downloads', {'enable_downloads': True}),
 
        ('name', {'name': u'new_repo_name'}),
 
        ('repo_group', {'group': u'test_group_for_update'}),
 
        ('name', {'name': 'new_repo_name'}),
 
        ('repo_group', {'group': 'test_group_for_update'}),
 
    ])
 
    def test_api_update_group_repo(self, changing_attr, updates):
 
        group_name = u'lololo'
 
        group_name = 'lololo'
 
        fixture.create_repo_group(group_name)
 
        repo_name = u'%s/api_update_me' % group_name
 
        repo_name = '%s/api_update_me' % group_name
 
        repo = fixture.create_repo(repo_name, repo_group=group_name, repo_type=self.REPO_TYPE)
 
        if changing_attr == 'repo_group':
 
            fixture.create_repo_group(updates['group'])
 

	
 
        id_, params = _build_data(self.apikey, 'update_repo',
 
                                  repoid=repo_name, **updates)
 
        response = api_call(self, params)
 
        if changing_attr == 'name':
 
            repo_name = u'%s/%s' % (group_name, updates['name'])
 
            repo_name = '%s/%s' % (group_name, updates['name'])
 
        if changing_attr == 'repo_group':
 
            repo_name = u'/'.join([updates['group'], repo_name.rsplit('/', 1)[-1]])
 
            repo_name = '/'.join([updates['group'], repo_name.rsplit('/', 1)[-1]])
 
        try:
 
            if changing_attr == 'clone_uri' and updates['clone_uri']:
 
                expected = u'failed to update repo `%s`' % repo_name
 
                expected = 'failed to update repo `%s`' % repo_name
 
                self._compare_error(id_, expected, given=response.body)
 
            else:
 
                expected = {
 
                    'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
 
                    'repository': repo.get_api_data()
 
                }
 
                self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 
            if changing_attr == 'repo_group':
 
                fixture.destroy_repo_group(updates['group'])
 
        fixture.destroy_repo_group(group_name)
 

	
 
    def test_api_update_repo_repo_group_does_not_exist(self):
 
        repo_name = u'admin_owned'
 
        repo_name = 'admin_owned'
 
        fixture.create_repo(repo_name)
 
        updates = {'group': 'test_group_for_update'}
 
        id_, params = _build_data(self.apikey, 'update_repo',
 
                                  repoid=repo_name, **updates)
 
        response = api_call(self, params)
 
        try:
 
            expected = 'repository group `%s` does not exist' % updates['group']
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_update_repo_regular_user_not_allowed(self):
 
        repo_name = u'admin_owned'
 
        repo_name = 'admin_owned'
 
        fixture.create_repo(repo_name)
 
        updates = {'description': 'something else'}
 
        id_, params = _build_data(self.apikey_regular, 'update_repo',
 
                                  repoid=repo_name, **updates)
 
        response = api_call(self, params)
 
        try:
 
            expected = 'repository `%s` does not exist' % repo_name
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    @mock.patch.object(RepoModel, 'update', crash)
 
    def test_api_update_repo_exception_occurred(self):
 
        repo_name = u'api_update_me'
 
        repo_name = 'api_update_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        id_, params = _build_data(self.apikey, 'update_repo',
 
                                  repoid=repo_name, owner=base.TEST_USER_ADMIN_LOGIN,)
 
        response = api_call(self, params)
 
        try:
 
            expected = 'failed to update repo `%s`' % repo_name
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_update_repo_regular_user_change_repo_name(self):
 
        repo_name = u'admin_owned'
 
        new_repo_name = u'new_repo_name'
 
        repo_name = 'admin_owned'
 
        new_repo_name = 'new_repo_name'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        RepoModel().grant_user_permission(repo=repo_name,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm='repository.admin')
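        # renaming effectively creates (moves) the repo under a new name, so
        # take away the default create permission to trigger the error below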
 
        UserModel().revoke_perm('default', 'hg.create.repository')
 
        UserModel().grant_perm('default', 'hg.create.none')
 
        updates = {'name': new_repo_name}
 
        id_, params = _build_data(self.apikey_regular, 'update_repo',
 
                                  repoid=repo_name, **updates)
 
        response = api_call(self, params)
 
        try:
 
            expected = 'no permission to create (or move) repositories'
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 
            fixture.destroy_repo(new_repo_name)
 

	
 
    def test_api_update_repo_regular_user_change_repo_name_allowed(self):
 
        repo_name = u'admin_owned'
 
        new_repo_name = u'new_repo_name'
 
        repo_name = 'admin_owned'
 
        new_repo_name = 'new_repo_name'
 
        repo = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        RepoModel().grant_user_permission(repo=repo_name,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm='repository.admin')
 
        UserModel().revoke_perm('default', 'hg.create.none')
 
        UserModel().grant_perm('default', 'hg.create.repository')
 
        updates = {'name': new_repo_name}
 
        id_, params = _build_data(self.apikey_regular, 'update_repo',
 
                                  repoid=repo_name, **updates)
 
        response = api_call(self, params)
 
        try:
 
            expected = {
 
                'msg': 'updated repo ID:%s %s' % (repo.repo_id, new_repo_name),
 
                'repository': repo.get_api_data()
 
            }
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 
            fixture.destroy_repo(new_repo_name)
 

	
 
    def test_api_update_repo_regular_user_change_owner(self):
 
        repo_name = u'admin_owned'
 
        repo_name = 'admin_owned'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        RepoModel().grant_user_permission(repo=repo_name,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm='repository.admin')
 
        updates = {'owner': base.TEST_USER_ADMIN_LOGIN}
 
        id_, params = _build_data(self.apikey_regular, 'update_repo',
 
                                  repoid=repo_name, **updates)
 
        response = api_call(self, params)
 
        try:
 
            expected = 'Only Kallithea admin can specify `owner` param'
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_delete_repo(self):
 
        repo_name = u'api_delete_me'
 
        repo_name = 'api_delete_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 

	
 
        id_, params = _build_data(self.apikey, 'delete_repo',
 
                                  repoid=repo_name, )
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'Deleted repository `%s`' % repo_name,
 
            'success': True
 
        }
 
        try:
 
            expected = ret
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_delete_repo_by_non_admin(self):
 
        repo_name = u'api_delete_me'
 
        repo_name = 'api_delete_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE,
 
                            cur_user=self.TEST_USER_LOGIN)
 
        id_, params = _build_data(self.apikey_regular, 'delete_repo',
 
                                  repoid=repo_name, )
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'Deleted repository `%s`' % repo_name,
 
            'success': True
 
        }
 
        try:
 
            expected = ret
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_delete_repo_by_non_admin_no_permission(self):
 
        repo_name = u'api_delete_me'
 
        repo_name = 'api_delete_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        try:
 
            id_, params = _build_data(self.apikey_regular, 'delete_repo',
 
                                      repoid=repo_name, )
 
            response = api_call(self, params)
 
            expected = 'repository `%s` does not exist' % (repo_name)
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_delete_repo_exception_occurred(self):
 
        repo_name = u'api_delete_me'
 
        repo_name = 'api_delete_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        try:
 
            with mock.patch.object(RepoModel, 'delete', crash):
 
                id_, params = _build_data(self.apikey, 'delete_repo',
 
                                          repoid=repo_name, )
 
                response = api_call(self, params)
 

	
 
                expected = 'failed to delete repository `%s`' % repo_name
 
                self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_fork_repo(self):
 
        fork_name = u'api-repo-fork'
 
        fork_name = 'api-repo-fork'
 
        id_, params = _build_data(self.apikey, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
                                  owner=base.TEST_USER_ADMIN_LOGIN,
 
        )
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'Created fork of `%s` as `%s`' % (self.REPO,
 
                                                     fork_name),
 
            'success': True,
 
            'task': None,
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_repo(fork_name)
 

	
 
    @base.parametrize('fork_name', [
 
        u'api-repo-fork',
 
        u'%s/api-repo-fork' % TEST_REPO_GROUP,
 
        'api-repo-fork',
 
        '%s/api-repo-fork' % TEST_REPO_GROUP,
 
    ])
 
    def test_api_fork_repo_non_admin(self, fork_name):
 
        id_, params = _build_data(self.apikey_regular, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
        )
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'Created fork of `%s` as `%s`' % (self.REPO,
 
                                                     fork_name),
 
            'success': True,
 
            'task': None,
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_repo(fork_name)
 

	
 
    def test_api_fork_repo_non_admin_specify_owner(self):
 
        fork_name = u'api-repo-fork'
 
        fork_name = 'api-repo-fork'
 
        id_, params = _build_data(self.apikey_regular, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
                                  owner=base.TEST_USER_ADMIN_LOGIN,
 
        )
 
        response = api_call(self, params)
 
        expected = 'Only Kallithea admin can specify `owner` param'
 
        self._compare_error(id_, expected, given=response.body)
 
        fixture.destroy_repo(fork_name)
 

	
 
    def test_api_fork_repo_non_admin_no_permission_to_fork(self):
 
        RepoModel().grant_user_permission(repo=self.REPO,
 
                                          user=self.default_user_username,
 
                                          perm='repository.none')
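        # with all access revoked, the repo is reported to the regular user as
        # non-existent rather than as an access-denied error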
 
        try:
 
            fork_name = u'api-repo-fork'
 
            fork_name = 'api-repo-fork'
 
            id_, params = _build_data(self.apikey_regular, 'fork_repo',
 
                                      repoid=self.REPO,
 
                                      fork_name=fork_name,
 
            )
 
            response = api_call(self, params)
 
            expected = 'repository `%s` does not exist' % (self.REPO)
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            RepoModel().grant_user_permission(repo=self.REPO,
 
                                              user=self.default_user_username,
 
                                              perm='repository.read')
 
            fixture.destroy_repo(fork_name)
 

	
 
    @base.parametrize('name,perm', [
 
        ('read', 'repository.read'),
 
        ('write', 'repository.write'),
 
        ('admin', 'repository.admin'),
 
    ])
 
    def test_api_fork_repo_non_admin_no_create_repo_permission(self, name, perm):
 
        fork_name = u'api-repo-fork'
 
        fork_name = 'api-repo-fork'
 
        # regardless of base repository permission, forking is disallowed
 
        # when repository creation is disabled
 
        RepoModel().grant_user_permission(repo=self.REPO,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm=perm)
 
        UserModel().revoke_perm('default', 'hg.create.repository')
 
        UserModel().grant_perm('default', 'hg.create.none')
 
        id_, params = _build_data(self.apikey_regular, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
        )
 
        response = api_call(self, params)
 
        expected = 'no permission to create repositories'
 
        self._compare_error(id_, expected, given=response.body)
 
        fixture.destroy_repo(fork_name)
 

	
 
    def test_api_fork_repo_unknown_owner(self):
 
        fork_name = u'api-repo-fork'
 
        fork_name = 'api-repo-fork'
 
        owner = 'i-dont-exist'
 
        id_, params = _build_data(self.apikey, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
                                  owner=owner,
 
        )
 
        response = api_call(self, params)
 
        expected = 'user `%s` does not exist' % owner
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_fork_repo_fork_exists(self):
 
        fork_name = u'api-repo-fork'
 
        fork_name = 'api-repo-fork'
 
        fixture.create_fork(self.REPO, fork_name)
 

	
 
        try:
 
            fork_name = u'api-repo-fork'
 
            fork_name = 'api-repo-fork'
 

	
 
            id_, params = _build_data(self.apikey, 'fork_repo',
 
                                      repoid=self.REPO,
 
                                      fork_name=fork_name,
 
                                      owner=base.TEST_USER_ADMIN_LOGIN,
 
            )
 
            response = api_call(self, params)
 

	
 
            expected = "fork `%s` already exist" % fork_name
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(fork_name)
 

	
 
    def test_api_fork_repo_repo_exists(self):
 
        fork_name = self.REPO
 

	
 
        id_, params = _build_data(self.apikey, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
                                  owner=base.TEST_USER_ADMIN_LOGIN,
 
        )
 
        response = api_call(self, params)
 

	
 
        expected = "repo `%s` already exist" % fork_name
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(RepoModel, 'create_fork', crash)
 
    def test_api_fork_repo_exception_occurred(self):
 
        fork_name = u'api-repo-fork'
 
        fork_name = 'api-repo-fork'
 
        id_, params = _build_data(self.apikey, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
                                  owner=base.TEST_USER_ADMIN_LOGIN,
 
        )
 
        response = api_call(self, params)
 

	
 
        expected = 'failed to fork repository `%s` as `%s`' % (self.REPO,
 
                                                               fork_name)
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_group(self):
 
        id_, params = _build_data(self.apikey, 'get_user_group',
 
                                  usergroupid=TEST_USER_GROUP)
 
        response = api_call(self, params)
 

	
 
        user_group = UserGroupModel().get_group(TEST_USER_GROUP)
 
        members = []
 
        for user in user_group.members:
 
            user = user.user
 
            members.append(user.get_api_data())
 

	
 
        ret = user_group.get_api_data()
 
        ret['members'] = members
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_groups(self):
 
        gr_name = u'test_user_group2'
 
        gr_name = 'test_user_group2'
 
        make_user_group(gr_name)
 

	
 
        try:
 
            id_, params = _build_data(self.apikey, 'get_user_groups', )
 
            response = api_call(self, params)
 

	
 
            expected = []
 
            for gr_name in [TEST_USER_GROUP, u'test_user_group2']:
 
            for gr_name in [TEST_USER_GROUP, 'test_user_group2']:
 
                user_group = UserGroupModel().get_group(gr_name)
 
                ret = user_group.get_api_data()
 
                expected.append(ret)
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user_group(gr_name)
 

	
 
    def test_api_create_user_group(self):
 
        group_name = u'some_new_group'
 
        group_name = 'some_new_group'
 
        id_, params = _build_data(self.apikey, 'create_user_group',
 
                                  group_name=group_name)
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'created new user group `%s`' % group_name,
 
            'user_group': jsonify(UserGroupModel() \
 
                .get_by_name(group_name) \
 
                .get_api_data())
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
        fixture.destroy_user_group(group_name)
 

	
 
    def test_api_get_user_group_that_exist(self):
 
        id_, params = _build_data(self.apikey, 'create_user_group',
 
                                  group_name=TEST_USER_GROUP)
 
        response = api_call(self, params)
 

	
 
        expected = "user group `%s` already exist" % TEST_USER_GROUP
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(UserGroupModel, 'create', crash)
 
    def test_api_get_user_group_exception_occurred(self):
 
        group_name = u'exception_happens'
 
        group_name = 'exception_happens'
 
        id_, params = _build_data(self.apikey, 'create_user_group',
 
                                  group_name=group_name)
 
        response = api_call(self, params)
 

	
 
        expected = 'failed to create group `%s`' % group_name
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @base.parametrize('changing_attr,updates', [
 
        ('group_name', {'group_name': u'new_group_name'}),
 
        ('group_name', {'group_name': u'test_group_for_update'}),
 
        ('group_name', {'group_name': 'new_group_name'}),
 
        ('group_name', {'group_name': 'test_group_for_update'}),
 
        ('owner', {'owner': base.TEST_USER_REGULAR_LOGIN}),
 
        ('active', {'active': False}),
 
        ('active', {'active': True}),
 
    ])
 
    def test_api_update_user_group(self, changing_attr, updates):
 
        gr_name = u'test_group_for_update'
 
        gr_name = 'test_group_for_update'
 
        user_group = fixture.create_user_group(gr_name)
 
        try:
 
            id_, params = _build_data(self.apikey, 'update_user_group',
 
                                      usergroupid=gr_name, **updates)
 
            response = api_call(self, params)
 
            expected = {
 
               'msg': 'updated user group ID:%s %s' % (user_group.users_group_id,
 
                                                     user_group.users_group_name),
 
               'user_group': user_group.get_api_data()
 
            }
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            if changing_attr == 'group_name':
 
                # switch to updated name for proper cleanup
 
                gr_name = updates['group_name']
 
            fixture.destroy_user_group(gr_name)
 

	
 
    @mock.patch.object(UserGroupModel, 'update', crash)
 
    def test_api_update_user_group_exception_occurred(self):
 
        gr_name = u'test_group'
 
        gr_name = 'test_group'
 
        fixture.create_user_group(gr_name)
 
        try:
 
            id_, params = _build_data(self.apikey, 'update_user_group',
 
                                      usergroupid=gr_name)
 
            response = api_call(self, params)
 
            expected = 'failed to update user group `%s`' % gr_name
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user_group(gr_name)
 

	
 
    def test_api_add_user_to_user_group(self):
 
        gr_name = u'test_group'
 
        gr_name = 'test_group'
 
        fixture.create_user_group(gr_name)
 
        try:
 
            id_, params = _build_data(self.apikey, 'add_user_to_user_group',
 
                                      usergroupid=gr_name,
 
                                      userid=base.TEST_USER_ADMIN_LOGIN)
 
            response = api_call(self, params)
 
            expected = {
 
            'msg': 'added member `%s` to user group `%s`' % (
 
                    base.TEST_USER_ADMIN_LOGIN, gr_name),
 
            'success': True
 
            }
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user_group(gr_name)
 

	
 
    def test_api_add_user_to_user_group_that_doesnt_exist(self):
 
        id_, params = _build_data(self.apikey, 'add_user_to_user_group',
 
                                  usergroupid='false-group',
 
                                  userid=base.TEST_USER_ADMIN_LOGIN)
 
        response = api_call(self, params)
 

	
 
        expected = 'user group `%s` does not exist' % 'false-group'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(UserGroupModel, 'add_user_to_group', crash)
 
    def test_api_add_user_to_user_group_exception_occurred(self):
 
        gr_name = u'test_group'
 
        gr_name = 'test_group'
 
        fixture.create_user_group(gr_name)
 
        try:
 
            id_, params = _build_data(self.apikey, 'add_user_to_user_group',
 
                                      usergroupid=gr_name,
 
                                      userid=base.TEST_USER_ADMIN_LOGIN)
 
            response = api_call(self, params)
 
            expected = 'failed to add member to user group `%s`' % gr_name
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user_group(gr_name)
 

	
 
    def test_api_remove_user_from_user_group(self):
 
        gr_name = u'test_group_3'
 
        gr_name = 'test_group_3'
 
        gr = fixture.create_user_group(gr_name)
 
        UserGroupModel().add_user_to_group(gr, user=base.TEST_USER_ADMIN_LOGIN)
 
        Session().commit()
 
        try:
 
            id_, params = _build_data(self.apikey, 'remove_user_from_user_group',
 
                                      usergroupid=gr_name,
 
                                      userid=base.TEST_USER_ADMIN_LOGIN)
 
            response = api_call(self, params)
 
            expected = {
 
                'msg': 'removed member `%s` from user group `%s`' % (
 
                    base.TEST_USER_ADMIN_LOGIN, gr_name
 
                ),
 
                'success': True}
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user_group(gr_name)
 

	
 
    @mock.patch.object(UserGroupModel, 'remove_user_from_group', crash)
 
    def test_api_remove_user_from_user_group_exception_occurred(self):
 
        gr_name = u'test_group_3'
 
        gr_name = 'test_group_3'
 
        gr = fixture.create_user_group(gr_name)
 
        UserGroupModel().add_user_to_group(gr, user=base.TEST_USER_ADMIN_LOGIN)
 
        Session().commit()
 
        try:
 
            id_, params = _build_data(self.apikey, 'remove_user_from_user_group',
 
                                      usergroupid=gr_name,
 
                                      userid=base.TEST_USER_ADMIN_LOGIN)
 
            response = api_call(self, params)
 
            expected = 'failed to remove member from user group `%s`' % gr_name
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user_group(gr_name)
 

	
 
    def test_api_delete_user_group(self):
 
        gr_name = u'test_group'
 
        gr_name = 'test_group'
 
        ugroup = fixture.create_user_group(gr_name)
 
        gr_id = ugroup.users_group_id
 
        try:
 
            id_, params = _build_data(self.apikey, 'delete_user_group',
 
                                      usergroupid=gr_name)
 
            response = api_call(self, params)
 
            expected = {
 
                'user_group': None,
 
                'msg': 'deleted user group ID:%s %s' % (gr_id, gr_name)
 
            }
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            if UserGroupModel().get_by_name(gr_name):
 
                fixture.destroy_user_group(gr_name)
 

	
 
    def test_api_delete_user_group_that_is_assigned(self):
 
        gr_name = u'test_group'
 
        gr_name = 'test_group'
 
        ugroup = fixture.create_user_group(gr_name)
 
        gr_id = ugroup.users_group_id
 

	
 
        ugr_to_perm = RepoModel().grant_user_group_permission(self.REPO, gr_name, 'repository.write')
 
        msg = 'User Group assigned to %s' % ugr_to_perm.repository.repo_name
 

	
 
        try:
 
            id_, params = _build_data(self.apikey, 'delete_user_group',
 
                                      usergroupid=gr_name)
 
            response = api_call(self, params)
 
            expected = msg
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            if UserGroupModel().get_by_name(gr_name):
 
                fixture.destroy_user_group(gr_name)
 

	
 
    def test_api_delete_user_group_exception_occurred(self):
 
        gr_name = u'test_group'
 
        gr_name = 'test_group'
 
        ugroup = fixture.create_user_group(gr_name)
 
        gr_id = ugroup.users_group_id
 
        id_, params = _build_data(self.apikey, 'delete_user_group',
 
                                  usergroupid=gr_name)
 

	
 
        try:
 
            with mock.patch.object(UserGroupModel, 'delete', crash):
 
                response = api_call(self, params)
 
                expected = 'failed to delete user group ID:%s %s' % (gr_id, gr_name)
 
                self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user_group(gr_name)
 

	
 
    @base.parametrize('name,perm', [
 
        ('none', 'repository.none'),
 
        ('read', 'repository.read'),
 
        ('write', 'repository.write'),
 
        ('admin', 'repository.admin'),
 
    ])
 
    def test_api_grant_user_permission(self, name, perm):
 
        id_, params = _build_data(self.apikey,
 
                                  'grant_user_permission',
 
                                  repoid=self.REPO,
 
                                  userid=base.TEST_USER_ADMIN_LOGIN,
 
                                  perm=perm)
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % (
 
                perm, base.TEST_USER_ADMIN_LOGIN, self.REPO
 
            ),
 
            'success': True
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_grant_user_permission_wrong_permission(self):
 
        perm = 'haha.no.permission'
 
        id_, params = _build_data(self.apikey,
 
                                  'grant_user_permission',
 
                                  repoid=self.REPO,
 
                                  userid=base.TEST_USER_ADMIN_LOGIN,
 
                                  perm=perm)
 
        response = api_call(self, params)
 

	
 
        expected = 'permission `%s` does not exist' % perm
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
@@ -2422,146 +2422,146 @@ class _BaseTestApi(object):
 
        assert len(result) == 5
 
        assert 'message' in result[0]
 
        assert 'added' not in result[0]
 

	
 
    def test_api_get_changesets_with_file_list(self):
 
        id_, params = _build_data(self.apikey, 'get_changesets',
 
                                  repoid=self.REPO, start_date="2010-04-07T23:30:30", end_date="2010-04-08T00:31:14", with_file_list=True)
 
        response = api_call(self, params)
 
        result = ext_json.loads(response.body)["result"]
 
        assert len(result) == 3
 
        assert 'message' in result[0]
 
        assert 'added' in result[0]
 

	
 
    def test_api_get_changeset(self):
 
        review = fixture.review_changeset(self.REPO, self.TEST_REVISION, "approved")
 
        id_, params = _build_data(self.apikey, 'get_changeset',
 
                                  repoid=self.REPO, raw_id=self.TEST_REVISION)
 
        response = api_call(self, params)
 
        result = ext_json.loads(response.body)["result"]
 
        assert result["raw_id"] == self.TEST_REVISION
 
        assert "reviews" not in result
 

	
 
    def test_api_get_changeset_with_reviews(self):
 
        reviewobjs = fixture.review_changeset(self.REPO, self.TEST_REVISION, "approved")
 
        id_, params = _build_data(self.apikey, 'get_changeset',
 
                                  repoid=self.REPO, raw_id=self.TEST_REVISION,
 
                                  with_reviews=True)
 
        response = api_call(self, params)
 
        result = ext_json.loads(response.body)["result"]
 
        assert result["raw_id"] == self.TEST_REVISION
 
        assert "reviews" in result
 
        assert len(result["reviews"]) == 1
 
        review = result["reviews"][0]
 
        expected = {
 
            'status': 'approved',
 
            'modified_at': reviewobjs[0].modified_at.replace(microsecond=0).isoformat(),
 
            'reviewer': 'test_admin',
 
        }
 
        assert review == expected
 

	
 
    def test_api_get_changeset_that_does_not_exist(self):
 
        """ Fetch changeset status for non-existant changeset.
 
        revision id is the above git hash used in the test above with the
 
        last 3 nibbles replaced with 0xf.  Should not exist for git _or_ hg.
 
        """
 
        id_, params = _build_data(self.apikey, 'get_changeset',
 
                                  repoid=self.REPO, raw_id = '7ab37bc680b4aa72c34d07b230c866c28e9fcfff')
 
        response = api_call(self, params)
 
        expected = u'Changeset %s does not exist' % ('7ab37bc680b4aa72c34d07b230c866c28e9fcfff',)
 
        expected = 'Changeset %s does not exist' % ('7ab37bc680b4aa72c34d07b230c866c28e9fcfff',)
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_changeset_without_permission(self):
 
        review = fixture.review_changeset(self.REPO, self.TEST_REVISION, "approved")
 
        RepoModel().revoke_user_permission(repo=self.REPO, user=self.TEST_USER_LOGIN)
 
        RepoModel().revoke_user_permission(repo=self.REPO, user="default")
 
        id_, params = _build_data(self.apikey_regular, 'get_changeset',
 
                                  repoid=self.REPO, raw_id=self.TEST_REVISION)
 
        response = api_call(self, params)
 
        expected = u'Access denied to repo %s' % self.REPO
 
        expected = 'Access denied to repo %s' % self.REPO
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_pullrequest(self):
 
        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, u'get test')
 
        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, 'get test')
 
        random_id = random.randrange(1, 9999)
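        # build the raw API request payload by hand instead of going through
        # the _build_data helper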
 
        params = ascii_bytes(ext_json.dumps({
 
            "id": random_id,
 
            "api_key": self.apikey,
 
            "method": 'get_pullrequest',
 
            "args": {"pullrequest_id": pull_request_id},
 
        }))
 
        response = api_call(self, params)
 
        pullrequest = PullRequest().get(pull_request_id)
 
        expected = {
 
            "status": "new",
 
            "pull_request_id": pull_request_id,
 
            "description": "No description",
 
            "url": "/%s/pull-request/%s/_/%s" % (self.REPO, pull_request_id, "stable"),
 
            "reviewers": [{"username": "test_regular"}],
 
            "org_repo_url": "http://localhost:80/%s" % self.REPO,
 
            "org_ref_parts": ["branch", "stable", self.TEST_PR_SRC],
 
            "other_ref_parts": ["branch", "default", self.TEST_PR_DST],
 
            "comments": [{"username": base.TEST_USER_ADMIN_LOGIN, "text": "",
 
                         "comment_id": pullrequest.comments[0].comment_id}],
 
            "owner": base.TEST_USER_ADMIN_LOGIN,
 
            "statuses": [{"status": "under_review", "reviewer": base.TEST_USER_ADMIN_LOGIN, "modified_at": "2000-01-01T00:00:00"} for i in range(0, len(self.TEST_PR_REVISIONS))],
 
            "title": "get test",
 
            "revisions": self.TEST_PR_REVISIONS,
 
        }
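        # timestamps in the response are normalized to the fixed placeholder
        # below so the comparison stays deterministic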
 
        self._compare_ok(random_id, expected,
 
                         given=re.sub(br"\d\d\d\d\-\d\d\-\d\dT\d\d\:\d\d\:\d\d",
 
                                      b"2000-01-01T00:00:00", response.body))
 

	
 
    def test_api_close_pullrequest(self):
 
        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, u'close test')
 
        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, 'close test')
 
        random_id = random.randrange(1, 9999)
 
        params = ascii_bytes(ext_json.dumps({
 
            "id": random_id,
 
            "api_key": self.apikey,
 
            "method": "comment_pullrequest",
 
            "args": {"pull_request_id": pull_request_id, "close_pr": True},
 
        }))
 
        response = api_call(self, params)
 
        self._compare_ok(random_id, True, given=response.body)
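        # closing goes through the comment API: an empty closing comment is
        # recorded and the pull request status flips to closed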
 
        pullrequest = PullRequest().get(pull_request_id)
 
        assert pullrequest.comments[-1].text == ''
 
        assert pullrequest.status == PullRequest.STATUS_CLOSED
 
        assert pullrequest.is_closed() == True
 

	
 
    def test_api_status_pullrequest(self):
 
        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, u"status test")
 
        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, "status test")
 

	
 
        random_id = random.randrange(1, 9999)
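        # TEST_USER_REGULAR2 is not a reviewer of this pull request, so the
        # status change must be rejected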
 
        params = ascii_bytes(ext_json.dumps({
 
            "id": random_id,
 
            "api_key": User.get_by_username(base.TEST_USER_REGULAR2_LOGIN).api_key,
 
            "method": "comment_pullrequest",
 
            "args": {"pull_request_id": pull_request_id, "status": ChangesetStatus.STATUS_APPROVED},
 
        }))
 
        response = api_call(self, params)
 
        pullrequest = PullRequest().get(pull_request_id)
 
        self._compare_error(random_id, "No permission to change pull request status. User needs to be admin, owner or reviewer.", given=response.body)
 
        assert ChangesetStatus.STATUS_UNDER_REVIEW == ChangesetStatusModel().calculate_pull_request_result(pullrequest)[2]
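        # TEST_USER_REGULAR is the assigned reviewer, so approving now succeeds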
 
        params = ascii_bytes(ext_json.dumps({
 
            "id": random_id,
 
            "api_key": User.get_by_username(base.TEST_USER_REGULAR_LOGIN).api_key,
 
            "method": "comment_pullrequest",
 
            "args": {"pull_request_id": pull_request_id, "status": ChangesetStatus.STATUS_APPROVED},
 
        }))
 
        response = api_call(self, params)
 
        self._compare_ok(random_id, True, given=response.body)
 
        pullrequest = PullRequest().get(pull_request_id)
 
        assert ChangesetStatus.STATUS_APPROVED == ChangesetStatusModel().calculate_pull_request_result(pullrequest)[2]
 

	
 
    def test_api_comment_pullrequest(self):
 
        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, u"comment test")
 
        pull_request_id = fixture.create_pullrequest(self, self.REPO, self.TEST_PR_SRC, self.TEST_PR_DST, "comment test")
 
        random_id = random.randrange(1, 9999)
 
        params = ascii_bytes(ext_json.dumps({
 
            "id": random_id,
 
            "api_key": self.apikey,
 
            "method": "comment_pullrequest",
 
            "args": {"pull_request_id": pull_request_id, "comment_msg": "Looks good to me"},
 
        }))
 
        response = api_call(self, params)
 
        self._compare_ok(random_id, True, given=response.body)
 
        pullrequest = PullRequest().get(pull_request_id)
 
        assert pullrequest.comments[-1].text == u'Looks good to me'
 
        assert pullrequest.comments[-1].text == 'Looks good to me'
kallithea/tests/api/test_api_git.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
from kallithea.tests.api.api_base import _BaseTestApi
 
from kallithea.tests.base import GIT_REPO, GIT_TEST_REVISION, TestController
 

	
 

	
 
class TestGitApi(_BaseTestApi, TestController):
 
    REPO = GIT_REPO
 
    REPO_TYPE = 'git'
 
    TEST_REVISION = GIT_TEST_REVISION
 
    TEST_PR_SRC = u'c60f01b77c42dce653d6b1d3b04689862c261929'
 
    TEST_PR_DST = u'10cddef6b794696066fb346434014f0a56810218'
 
    TEST_PR_REVISIONS = [u'1bead5880d2dbe831762bf7fb439ba2919b75fdd',
 
                         u'9bcd3ecfc8832a8cd881c1c1bbe2d13ffa9d94c7',
 
                         u'283de4dfca8479875a1befb8d4059f3bbb725145',
 
                         u'c60f01b77c42dce653d6b1d3b04689862c261929']
 
    TEST_PR_SRC = 'c60f01b77c42dce653d6b1d3b04689862c261929'
 
    TEST_PR_DST = '10cddef6b794696066fb346434014f0a56810218'
 
    TEST_PR_REVISIONS = ['1bead5880d2dbe831762bf7fb439ba2919b75fdd',
 
                         '9bcd3ecfc8832a8cd881c1c1bbe2d13ffa9d94c7',
 
                         '283de4dfca8479875a1befb8d4059f3bbb725145',
 
                         'c60f01b77c42dce653d6b1d3b04689862c261929']
kallithea/tests/api/test_api_hg.py
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
from kallithea.tests.api.api_base import _BaseTestApi
 
from kallithea.tests.base import HG_REPO, HG_TEST_REVISION, TestController
 

	
 

	
 
class TestHgApi(_BaseTestApi, TestController):
 
    REPO = HG_REPO
 
    REPO_TYPE = 'hg'
 
    TEST_REVISION = HG_TEST_REVISION
 
    TEST_PR_SRC = u'4f7e2131323e0749a740c0a56ab68ae9269c562a'
 
    TEST_PR_DST = u'92831aebf2f8dd4879e897024b89d09af214df1c'
 
    TEST_PR_REVISIONS = [u'720bbdb27665d6262b313e8a541b654d0cbd5b27',
 
                         u'f41649565a9e89919a588a163e717b4084f8a3b1',
 
                         u'94f45ed825a113e61af7e141f44ca578374abef0',
 
                         u'fef5bfe1dc17611d5fb59a7f6f95c55c3606f933',
 
                         u'4f7e2131323e0749a740c0a56ab68ae9269c562a']
 
    TEST_PR_SRC = '4f7e2131323e0749a740c0a56ab68ae9269c562a'
 
    TEST_PR_DST = '92831aebf2f8dd4879e897024b89d09af214df1c'
 
    TEST_PR_REVISIONS = ['720bbdb27665d6262b313e8a541b654d0cbd5b27',
 
                         'f41649565a9e89919a588a163e717b4084f8a3b1',
 
                         '94f45ed825a113e61af7e141f44ca578374abef0',
 
                         'fef5bfe1dc17611d5fb59a7f6f95c55c3606f933',
 
                         '4f7e2131323e0749a740c0a56ab68ae9269c562a']
kallithea/tests/base.py
 
@@ -21,107 +21,107 @@ import time
 

	
 
import pytest
 
from webtest import TestApp
 

	
 
from kallithea.lib.utils2 import ascii_str
 
from kallithea.model.db import User
 

	
 

	
 
log = logging.getLogger(__name__)
 

	
 
skipif = pytest.mark.skipif
 
parametrize = pytest.mark.parametrize
 

	
 
# Hack: These module global values MUST be set to actual values before running any tests. This is currently done by conftest.py.
 
url = None
 
testapp = None
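# (Illustration only - conftest.py is expected to assign real values here, roughly:
#     kallithea.tests.base.url = <url helper>
#     kallithea.tests.base.testapp = <loaded WSGI application>
# The exact assignments live in conftest.py, not in this module.)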
 

	
 
__all__ = [
 
    'skipif', 'parametrize', 'url', 'TestController',
 
    'ldap_lib_installed', 'pam_lib_installed', 'invalidate_all_caches',
 
    'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'NEW_HG_REPO', 'NEW_GIT_REPO',
 
    'HG_FORK', 'GIT_FORK', 'TEST_USER_ADMIN_LOGIN', 'TEST_USER_ADMIN_PASS',
 
    'TEST_USER_ADMIN_EMAIL', 'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS',
 
    'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN',
 
    'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'IP_ADDR',
 
    'TEST_HG_REPO', 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO',
 
    'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'HG_REMOTE_REPO',
 
    'GIT_REMOTE_REPO', 'HG_TEST_REVISION', 'GIT_TEST_REVISION',
 
]
 

	
 
## SOME GLOBALS FOR TESTS
 

	
 
TESTS_TMP_PATH = os.environ.get('KALLITHEA_TESTS_TMP_PATH', tempfile.mkdtemp(prefix='kallithea-test-'))
 

	
 
TEST_USER_ADMIN_LOGIN = 'test_admin'
 
TEST_USER_ADMIN_PASS = 'test12'
 
TEST_USER_ADMIN_EMAIL = 'test_admin@example.com'
 

	
 
TEST_USER_REGULAR_LOGIN = 'test_regular'
 
TEST_USER_REGULAR_PASS = 'test12'
 
TEST_USER_REGULAR_EMAIL = 'test_regular@example.com'
 

	
 
TEST_USER_REGULAR2_LOGIN = 'test_regular2'
 
TEST_USER_REGULAR2_PASS = 'test12'
 
TEST_USER_REGULAR2_EMAIL = 'test_regular2@example.com'
 

	
 
IP_ADDR = '127.0.0.127'
 

	
 
HG_REPO = u'vcs_test_hg'
 
GIT_REPO = u'vcs_test_git'
 
HG_REPO = 'vcs_test_hg'
 
GIT_REPO = 'vcs_test_git'
 

	
 
NEW_HG_REPO = u'vcs_test_hg_new'
 
NEW_GIT_REPO = u'vcs_test_git_new'
 
NEW_HG_REPO = 'vcs_test_hg_new'
 
NEW_GIT_REPO = 'vcs_test_git_new'
 

	
 
HG_FORK = u'vcs_test_hg_fork'
 
GIT_FORK = u'vcs_test_git_fork'
 
HG_FORK = 'vcs_test_hg_fork'
 
GIT_FORK = 'vcs_test_git_fork'
 

	
 
HG_TEST_REVISION = u"a53d9201d4bc278910d416d94941b7ea007ecd52"
 
GIT_TEST_REVISION = u"7ab37bc680b4aa72c34d07b230c866c28e9fc204"
 
HG_TEST_REVISION = "a53d9201d4bc278910d416d94941b7ea007ecd52"
 
GIT_TEST_REVISION = "7ab37bc680b4aa72c34d07b230c866c28e9fc204"
 

	
 

	
 
## VCS
 
uniq_suffix = str(int(time.mktime(datetime.datetime.now().timetuple())))
 

	
 
GIT_REMOTE_REPO = os.path.join(TESTS_TMP_PATH, GIT_REPO)
 

	
 
TEST_GIT_REPO = os.path.join(TESTS_TMP_PATH, GIT_REPO)
 
TEST_GIT_REPO_CLONE = os.path.join(TESTS_TMP_PATH, 'vcs-git-clone-%s' % uniq_suffix)
 
TEST_GIT_REPO_PULL = os.path.join(TESTS_TMP_PATH, 'vcs-git-pull-%s' % uniq_suffix)
 

	
 
HG_REMOTE_REPO = os.path.join(TESTS_TMP_PATH, HG_REPO)
 

	
 
TEST_HG_REPO = os.path.join(TESTS_TMP_PATH, HG_REPO)
 
TEST_HG_REPO_CLONE = os.path.join(TESTS_TMP_PATH, 'vcs-hg-clone-%s' % uniq_suffix)
 
TEST_HG_REPO_PULL = os.path.join(TESTS_TMP_PATH, 'vcs-hg-pull-%s' % uniq_suffix)
 

	
 
# By default, some of the tests will utilise locally available
 
# repositories stored within tar.gz archives as source for
 
# cloning. Should you wish to use some other, remote archive, simply
 
# uncomment these entries and/or update the URLs to use.
 
#
 
# GIT_REMOTE_REPO = 'git://github.com/codeinn/vcs.git'
 
# HG_REMOTE_REPO = 'http://bitbucket.org/marcinkuzminski/vcs'
 

	
 
# skip ldap tests if LDAP lib is not installed
 
ldap_lib_installed = False
 
try:
 
    import ldap
 
    ldap.API_VERSION
 
    ldap_lib_installed = True
 
except ImportError:
 
    # means that python-ldap is not installed
 
    pass
 

	
 
try:
 
    import pam
 
    pam.PAM_TEXT_INFO
 
    pam_lib_installed = True
 
except ImportError:
 
    pam_lib_installed = False
 

	
 

	
 
def invalidate_all_caches():
 
    """Invalidate all beaker caches currently configured.
 
    Useful when manipulating IP permissions in a test and changes need to take
 
    effect immediately.
 
    Note: Any use of this function is probably a workaround - it should be
 
@@ -135,60 +135,60 @@ class NullHandler(logging.Handler):
 
    def emit(self, record):
 
        pass
 

	
 

	
 
class TestController(object):
 
    """Pytest-style test controller"""
 

	
 
    # Note: pytest base classes cannot have an __init__ method
 

	
 
    @pytest.fixture(autouse=True)
 
    def app_fixture(self):
 
        h = NullHandler()
 
        logging.getLogger("kallithea").addHandler(h)
 
        self.app = TestApp(testapp)
 
        return self.app
 

	
 
    def log_user(self, username=TEST_USER_ADMIN_LOGIN,
 
                 password=TEST_USER_ADMIN_PASS):
 
        self._logged_username = username
 
        response = self.app.post(url(controller='login', action='index'),
 
                                 {'username': username,
 
                                  'password': password,
 
                                  '_session_csrf_secret_token': self.session_csrf_secret_token()})
 

	
 
        if b'Invalid username or password' in response.body:
 
            pytest.fail('could not login using %s %s' % (username, password))
 

	
 
        assert response.status == '302 Found'
 
        self.assert_authenticated_user(response, username)
 

	
 
        response = response.follow()
 
        return response.session['authuser']
 

	
 
    def _get_logged_user(self):
 
        return User.get_by_username(self._logged_username)
 

	
 
    def assert_authenticated_user(self, response, expected_username):
 
        cookie = response.session.get('authuser')
 
        user = cookie and cookie.get('user_id')
 
        user = user and User.get(user)
 
        user = user and user.username
 
        assert user == expected_username
 

	
 
    def session_csrf_secret_token(self):
 
        return ascii_str(self.app.get(url('session_csrf_secret_token')).body)
 

	
 
    def checkSessionFlash(self, response, msg=None, skip=0, _matcher=lambda msg, m: msg in m):
 
        if 'flash' not in response.session:
 
            pytest.fail(u'msg `%s` not found - session has no flash:\n%s' % (msg, response))
 
            pytest.fail('msg `%s` not found - session has no flash:\n%s' % (msg, response))
 
        try:
 
            level, m = response.session['flash'][-1 - skip]
 
            if _matcher(msg, m):
 
                return
 
        except IndexError:
 
            pass
 
        pytest.fail(u'msg `%s` not found in session flash (skipping %s): %s' %
 
        pytest.fail('msg `%s` not found in session flash (skipping %s): %s' %
 
                    (msg, skip, ', '.join('`%s`' % m for level, m in response.session['flash'])))
 

	
 
    def checkSessionFlashRegex(self, response, regex, skip=0):
 
        self.checkSessionFlash(response, regex, skip=skip, _matcher=re.search)
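
kallithea/tests/base.py above supplies the shared TestController that the functional tests below build on: the autouse app_fixture attaches a webtest TestApp as self.app, log_user() posts credentials together with the session CSRF token, and checkSessionFlash() asserts on flash messages. A hedged sketch of a derived test (the 'home' controller route is an assumed example, not shown in this changeset):

    from kallithea.tests import base

    class TestExampleController(base.TestController):
        # Sketch only; the route below is an assumed example.

        def test_front_page_after_login(self):
            self.log_user()  # defaults to TEST_USER_ADMIN_LOGIN / TEST_USER_ADMIN_PASS
            response = self.app.get(base.url(controller='home', action='index'))
            assert response.status_int == 200
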
kallithea/tests/fixture.py
 
@@ -47,147 +47,147 @@ from kallithea.tests.base import (
 

	
 
log = logging.getLogger(__name__)
 

	
 
FIXTURES = os.path.join(dirname(dirname(os.path.abspath(__file__))), 'tests', 'fixtures')
 

	
 

	
 
def error_function(*args, **kwargs):
 
    raise Exception('Total Crash !')
 

	
 

	
 
class Fixture(object):
 

	
 
    def __init__(self):
 
        pass
 

	
 
    def anon_access(self, status):
 
        """
 
        Context manager for controlling anonymous access.
 
        Anon access will be set and committed, but restored again when exiting the block.
 

	
 
        Usage:
 

	
 
        fixture = Fixture()
 
        with fixture.anon_access(False):
 
            stuff
 
        """
 

	
 
        class context(object):
 
            def __enter__(self):
 
                anon = User.get_default_user()
 
                self._before = anon.active
 
                anon.active = status
 
                Session().commit()
 
                invalidate_all_caches()
 

	
 
            def __exit__(self, exc_type, exc_val, exc_tb):
 
                anon = User.get_default_user()
 
                anon.active = self._before
 
                Session().commit()
 

	
 
        return context()
 

	
 
    def _get_repo_create_params(self, **custom):
 
        """Return form values to be validated through RepoForm"""
 
        defs = dict(
 
            repo_name=None,
 
            repo_type='hg',
 
            clone_uri='',
 
            repo_group=u'-1',
 
            repo_description=u'DESC',
 
            repo_group='-1',
 
            repo_description='DESC',
 
            repo_private=False,
 
            repo_landing_rev='rev:tip',
 
            repo_copy_permissions=False,
 
            repo_state=Repository.STATE_CREATED,
 
        )
 
        defs.update(custom)
 
        if 'repo_name_full' not in custom:
 
            defs.update({'repo_name_full': defs['repo_name']})
 

	
 
        # fix the repo name if passed as repo_name_full
 
        if defs['repo_name']:
 
            defs['repo_name'] = defs['repo_name'].split('/')[-1]
 

	
 
        return defs
 

	
 
    def _get_repo_group_create_params(self, **custom):
 
        """Return form values to be validated through RepoGroupForm"""
 
        defs = dict(
 
            group_name=None,
 
            group_description=u'DESC',
 
            parent_group_id=u'-1',
 
            group_description='DESC',
 
            parent_group_id='-1',
 
            perms_updates=[],
 
            perms_new=[],
 
            recursive=False
 
        )
 
        defs.update(custom)
 

	
 
        return defs
 

	
 
    def _get_user_create_params(self, name, **custom):
 
        defs = dict(
 
            username=name,
 
            password='qweqwe',
 
            email='%s+test@example.com' % name,
 
            firstname=u'TestUser',
 
            lastname=u'Test',
 
            firstname='TestUser',
 
            lastname='Test',
 
            active=True,
 
            admin=False,
 
            extern_type='internal',
 
            extern_name=None
 
        )
 
        defs.update(custom)
 

	
 
        return defs
 

	
 
    def _get_user_group_create_params(self, name, **custom):
 
        defs = dict(
 
            users_group_name=name,
 
            user_group_description=u'DESC',
 
            user_group_description='DESC',
 
            users_group_active=True,
 
            user_group_data={},
 
        )
 
        defs.update(custom)
 

	
 
        return defs
 

	
 
    def create_repo(self, name, repo_group=None, **kwargs):
 
        if 'skip_if_exists' in kwargs:
 
            del kwargs['skip_if_exists']
 
            r = Repository.get_by_repo_name(name)
 
            if r:
 
                return r
 

	
 
        if isinstance(repo_group, RepoGroup):
 
            repo_group = repo_group.group_id
 

	
 
        form_data = self._get_repo_create_params(repo_name=name, **kwargs)
 
        form_data['repo_group'] = repo_group # patch form dict so it can be used directly by model
 
        cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
 
        RepoModel().create(form_data, cur_user)
 
        Session().commit()
 
        ScmModel().mark_for_invalidation(name)
 
        return Repository.get_by_repo_name(name)
 

	
 
    def create_fork(self, repo_to_fork, fork_name, **kwargs):
 
        repo_to_fork = Repository.get_by_repo_name(repo_to_fork)
 

	
 
        form_data = self._get_repo_create_params(repo_name=fork_name,
 
                                            fork_parent_id=repo_to_fork,
 
                                            repo_type=repo_to_fork.repo_type,
 
                                            **kwargs)
 
        # patch form dict so it can be used directly by model
 
        form_data['description'] = form_data['repo_description']
 
        form_data['private'] = form_data['repo_private']
 
        form_data['landing_rev'] = form_data['repo_landing_rev']
 

	
 
        owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
 
        RepoModel().create_fork(form_data, cur_user=owner)
 
        Session().commit()
 
        ScmModel().mark_for_invalidation(fork_name)
 
        r = Repository.get_by_repo_name(fork_name)
 
        assert r
 
        return r
 

	
 
    def destroy_repo(self, repo_name, **kwargs):
 
        RepoModel().delete(repo_name, **kwargs)
 
        Session().commit()
 
@@ -208,183 +208,183 @@ class Fixture(object):
 
            )
 
        Session().commit()
 
        gr = RepoGroup.get_by_group_name(gr.group_name)
 
        return gr
 

	
 
    def destroy_repo_group(self, repogroupid):
 
        RepoGroupModel().delete(repogroupid)
 
        Session().commit()
 

	
 
    def create_user(self, name, **kwargs):
 
        if 'skip_if_exists' in kwargs:
 
            del kwargs['skip_if_exists']
 
            user = User.get_by_username(name)
 
            if user:
 
                return user
 
        form_data = self._get_user_create_params(name, **kwargs)
 
        user = UserModel().create(form_data)
 
        Session().commit()
 
        user = User.get_by_username(user.username)
 
        return user
 

	
 
    def destroy_user(self, userid):
 
        UserModel().delete(userid)
 
        Session().commit()
 

	
 
    def create_user_group(self, name, **kwargs):
 
        if 'skip_if_exists' in kwargs:
 
            del kwargs['skip_if_exists']
 
            gr = UserGroup.get_by_group_name(group_name=name)
 
            if gr:
 
                return gr
 
        form_data = self._get_user_group_create_params(name, **kwargs)
 
        owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
 
        user_group = UserGroupModel().create(
 
            name=form_data['users_group_name'],
 
            description=form_data['user_group_description'],
 
            owner=owner, active=form_data['users_group_active'],
 
            group_data=form_data['user_group_data'])
 
        Session().commit()
 
        user_group = UserGroup.get_by_group_name(user_group.users_group_name)
 
        return user_group
 

	
 
    def destroy_user_group(self, usergroupid):
 
        UserGroupModel().delete(user_group=usergroupid, force=True)
 
        Session().commit()
 

	
 
    def create_gist(self, **kwargs):
 
        form_data = {
 
            'description': u'new-gist',
 
            'description': 'new-gist',
 
            'owner': TEST_USER_ADMIN_LOGIN,
 
            'gist_type': Gist.GIST_PUBLIC,
 
            'lifetime': -1,
 
            'gist_mapping': {'filename1.txt': {'content': 'hello world'}}
 
        }
 
        form_data.update(kwargs)
 
        gist = GistModel().create(
 
            description=form_data['description'], owner=form_data['owner'], ip_addr=IP_ADDR,
 
            gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'],
 
            lifetime=form_data['lifetime']
 
        )
 
        Session().commit()
 

	
 
        return gist
 

	
 
    def destroy_gists(self, gistid=None):
 
        for g in Gist.query():
 
            if gistid:
 
                if gistid == g.gist_access_id:
 
                    GistModel().delete(g)
 
            else:
 
                GistModel().delete(g)
 
        Session().commit()
 

	
 
    def load_resource(self, resource_name, strip=True):
 
        with open(os.path.join(FIXTURES, resource_name), 'rb') as f:
 
            source = f.read()
 
            if strip:
 
                source = source.strip()
 

	
 
        return source
 

	
 
    def commit_change(self, repo, filename, content, message, vcs_type,
 
                      parent=None, newfile=False, author=None):
 
        repo = Repository.get_by_repo_name(repo)
 
        _cs = parent
 
        if parent is None:
 
            _cs = EmptyChangeset(alias=vcs_type)
 
        if author is None:
 
            author = '%s <%s>' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_EMAIL)
 

	
 
        if newfile:
 
            nodes = {
 
                filename: {
 
                    'content': content
 
                }
 
            }
 
            cs = ScmModel().create_nodes(
 
                user=TEST_USER_ADMIN_LOGIN,
 
                ip_addr=IP_ADDR,
 
                repo=repo,
 
                message=message,
 
                nodes=nodes,
 
                parent_cs=_cs,
 
                author=author,
 
            )
 
        else:
 
            cs = ScmModel().commit_change(
 
                repo=repo.scm_instance, repo_name=repo.repo_name,
 
                cs=parent,
 
                user=TEST_USER_ADMIN_LOGIN,
 
                ip_addr=IP_ADDR,
 
                author=author,
 
                message=message,
 
                content=content,
 
                f_path=filename
 
            )
 
        return cs
 

	
 
    def review_changeset(self, repo, revision, status, author=TEST_USER_ADMIN_LOGIN):
 
        comment = ChangesetCommentsModel().create(u"review comment", repo, author, revision=revision, send_email=False)
 
        comment = ChangesetCommentsModel().create("review comment", repo, author, revision=revision, send_email=False)
 
        csm = ChangesetStatusModel().set_status(repo, ChangesetStatus.STATUS_APPROVED, author, comment, revision=revision)
 
        Session().commit()
 
        return csm
 

	
 
    def create_pullrequest(self, testcontroller, repo_name, pr_src_rev, pr_dst_rev, title=u'title'):
 
    def create_pullrequest(self, testcontroller, repo_name, pr_src_rev, pr_dst_rev, title='title'):
 
        org_ref = 'branch:stable:%s' % pr_src_rev
 
        other_ref = 'branch:default:%s' % pr_dst_rev
 
        with test_context(testcontroller.app): # needed to be able to mock request user
 
            org_repo = other_repo = Repository.get_by_repo_name(repo_name)
 
            owner_user = User.get_by_username(TEST_USER_ADMIN_LOGIN)
 
            reviewers = [User.get_by_username(TEST_USER_REGULAR_LOGIN)]
 
            request.authuser = AuthUser(dbuser=owner_user)
 
            # creating a PR sends a message with an absolute URL - without routing that requires mocking
 
            with mock.patch.object(helpers, 'url', (lambda arg, qualified=False, **kwargs: ('https://localhost' if qualified else '') + '/fake/' + arg)):
 
                cmd = CreatePullRequestAction(org_repo, other_repo, org_ref, other_ref, title, u'No description', owner_user, reviewers)
 
                cmd = CreatePullRequestAction(org_repo, other_repo, org_ref, other_ref, title, 'No description', owner_user, reviewers)
 
                pull_request = cmd.execute()
 
            Session().commit()
 
        return pull_request.pull_request_id
 

	
 

	
 
#==============================================================================
 
# Global test environment setup
 
#==============================================================================
 

	
 
def create_test_env(repos_test_path, config):
 
    """
 
    Makes a fresh database and
 
    installs test repositories into tmp dir
 
    """
 

	
 
    # PART ONE create db
 
    dbconf = config['sqlalchemy.url']
 
    log.debug('making test db %s', dbconf)
 

	
 
    # create test dir if it doesn't exist
 
    if not os.path.isdir(repos_test_path):
 
        log.debug('Creating testdir %s', repos_test_path)
 
        os.makedirs(repos_test_path)
 

	
 
    dbmanage = DbManage(dbconf=dbconf, root=config['here'],
 
                        tests=True)
 
    dbmanage.create_tables(override=True)
 
    # for tests dynamically set new root paths based on generated content
 
    dbmanage.create_settings(dbmanage.prompt_repo_root_path(repos_test_path))
 
    dbmanage.create_default_user()
 
    dbmanage.admin_prompt()
 
    dbmanage.create_permissions()
 
    dbmanage.populate_default_permissions()
 
    Session().commit()
 
    # PART TWO make test repo
 
    log.debug('making test vcs repositories')
 

	
 
    idx_path = config['index_dir']
 
    data_path = config['cache_dir']
 

	
 
    # clean index and data
 
    if idx_path and os.path.exists(idx_path):
 
        log.debug('remove %s', idx_path)
 
        shutil.rmtree(idx_path)
 

	
 
    if data_path and os.path.exists(data_path):
 
        log.debug('remove %s', data_path)
 
        shutil.rmtree(data_path)
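
The Fixture helper shown above wraps the model layer for test setup: create_repo(), create_user(), create_gist() and friends build form-style parameter dicts, call the corresponding model and commit the session, while anon_access() is a context manager that toggles the anonymous user and restores the previous state on exit. A short usage sketch (the repository name is a hypothetical example):

    from kallithea.tests.fixture import Fixture

    fixture = Fixture()
    repo = fixture.create_repo('vcs_test_hg_example', repo_type='hg')
    try:
        # Temporarily disable anonymous access; the previous state is restored on exit.
        with fixture.anon_access(False):
            pass  # exercise code paths that require an authenticated user
    finally:
        fixture.destroy_repo('vcs_test_hg_example')
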
kallithea/tests/functional/test_admin_auth_settings.py
 
from kallithea.model.db import Setting
 
from kallithea.tests import base
 

	
 

	
 
class TestAuthSettingsController(base.TestController):
 
    def _enable_plugins(self, plugins_list):
 
        test_url = base.url(controller='admin/auth_settings',
 
                       action='auth_settings')
 
        params={'auth_plugins': plugins_list, '_session_csrf_secret_token': self.session_csrf_secret_token()}
 

	
 
        for plugin in plugins_list.split(','):
 
            enable = plugin.partition('kallithea.lib.auth_modules.')[-1]
 
            params.update({'%s_enabled' % enable: True})
 
        response = self.app.post(url=test_url, params=params)
 
        return params
 
        #self.checkSessionFlash(response, 'Auth settings updated successfully')
 

	
 
    def test_index(self):
 
        self.log_user()
 
        response = self.app.get(base.url(controller='admin/auth_settings',
 
                                    action='index'))
 
        response.mustcontain('Authentication Plugins')
 

	
 
    @base.skipif(not base.ldap_lib_installed, reason='skipping due to missing ldap lib')
 
    def test_ldap_save_settings(self):
 
        self.log_user()
 

	
 
        params = self._enable_plugins('kallithea.lib.auth_modules.auth_internal,kallithea.lib.auth_modules.auth_ldap')
 
        params.update({'auth_ldap_host': u'dc.example.com',
 
        params.update({'auth_ldap_host': 'dc.example.com',
 
                       'auth_ldap_port': '999',
 
                       'auth_ldap_tls_kind': 'PLAIN',
 
                       'auth_ldap_tls_reqcert': 'NEVER',
 
                       'auth_ldap_cacertdir': '',
 
                       'auth_ldap_dn_user': 'test_user',
 
                       'auth_ldap_dn_pass': 'test_pass',
 
                       'auth_ldap_base_dn': 'test_base_dn',
 
                       'auth_ldap_filter': 'test_filter',
 
                       'auth_ldap_search_scope': 'BASE',
 
                       'auth_ldap_attr_login': 'test_attr_login',
 
                       'auth_ldap_attr_firstname': 'ima',
 
                       'auth_ldap_attr_lastname': 'tester',
 
                       'auth_ldap_attr_email': 'test@example.com'})
 

	
 
        test_url = base.url(controller='admin/auth_settings',
 
                       action='auth_settings')
 

	
 
        response = self.app.post(url=test_url, params=params)
 
        self.checkSessionFlash(response, 'Auth settings updated successfully')
 

	
 
        new_settings = Setting.get_auth_settings()
 
        assert new_settings['auth_ldap_host'] == u'dc.example.com', 'fail db write compare'
 
        assert new_settings['auth_ldap_host'] == 'dc.example.com', 'fail db write compare'
 

	
 
    @base.skipif(not base.ldap_lib_installed, reason='skipping due to missing ldap lib')
 
    def test_ldap_error_form_wrong_port_number(self):
 
        self.log_user()
 

	
 
        params = self._enable_plugins('kallithea.lib.auth_modules.auth_internal,kallithea.lib.auth_modules.auth_ldap')
 
        params.update({'auth_ldap_host': '',
 
                       'auth_ldap_port': 'i-should-be-number',  # bad port num
 
                       'auth_ldap_tls_kind': 'PLAIN',
 
                       'auth_ldap_tls_reqcert': 'NEVER',
 
                       'auth_ldap_dn_user': '',
 
                       'auth_ldap_dn_pass': '',
 
                       'auth_ldap_base_dn': '',
 
                       'auth_ldap_filter': '',
 
                       'auth_ldap_search_scope': 'BASE',
 
                       'auth_ldap_attr_login': '',
 
                       'auth_ldap_attr_firstname': '',
 
                       'auth_ldap_attr_lastname': '',
 
                       'auth_ldap_attr_email': ''})
 
        test_url = base.url(controller='admin/auth_settings',
 
                       action='auth_settings')
 

	
 
        response = self.app.post(url=test_url, params=params)
 

	
 
        response.mustcontain("""<span class="error-message">"""
 
                             """Please enter a number</span>""")
 

	
 
    @base.skipif(not base.ldap_lib_installed, reason='skipping due to missing ldap lib')
 
    def test_ldap_error_form(self):
 
        self.log_user()
 

	
 
        params = self._enable_plugins('kallithea.lib.auth_modules.auth_internal,kallithea.lib.auth_modules.auth_ldap')
 
        params.update({'auth_ldap_host': 'Host',
 
                       'auth_ldap_port': '123',
 
                       'auth_ldap_tls_kind': 'PLAIN',
 
                       'auth_ldap_tls_reqcert': 'NEVER',
 
                       'auth_ldap_dn_user': '',
 
                       'auth_ldap_dn_pass': '',
 
                       'auth_ldap_base_dn': '',
 
                       'auth_ldap_filter': '',
 
                       'auth_ldap_search_scope': 'BASE',
 
                       'auth_ldap_attr_login': '',  # <----- missing required input
 
                       'auth_ldap_attr_firstname': '',
 
                       'auth_ldap_attr_lastname': '',
 
                       'auth_ldap_attr_email': ''})
 

	
 
        test_url = base.url(controller='admin/auth_settings',
 
                       action='auth_settings')
 
@@ -194,66 +194,66 @@ class TestAuthSettingsController(base.Te
 

	
 
    def test_container_auth_clean_username_backslash(self):
 
        self._container_auth_setup(
 
            auth_container_header='REMOTE_USER',
 
            auth_container_email_header='',
 
            auth_container_firstname_header='',
 
            auth_container_lastname_header='',
 
            auth_container_fallback_header='',
 
            auth_container_clean_username='True',
 
        )
 
        self._container_auth_verify_login(
 
            extra_environ={'REMOTE_USER': r'example\jane'},
 
            resulting_username=r'jane',
 
        )
 

	
 
    def test_container_auth_no_logout(self):
 
        self._container_auth_setup(
 
            auth_container_header='REMOTE_USER',
 
            auth_container_email_header='',
 
            auth_container_firstname_header='',
 
            auth_container_lastname_header='',
 
            auth_container_fallback_header='',
 
            auth_container_clean_username='True',
 
        )
 
        response = self.app.get(
 
            url=base.url(controller='admin/my_account', action='my_account'),
 
            extra_environ={'REMOTE_USER': 'john'},
 
        )
 
        assert b'Log Out' not in response.normal_body
 

	
 
    def test_crowd_save_settings(self):
 
        self.log_user()
 

	
 
        params = self._enable_plugins('kallithea.lib.auth_modules.auth_internal,kallithea.lib.auth_modules.auth_crowd')
 
        params.update({'auth_crowd_host': ' hostname ',
 
                       'auth_crowd_app_password': 'secret',
 
                       'auth_crowd_admin_groups': 'mygroup',
 
                       'auth_crowd_port': '123',
 
                       'auth_crowd_method': 'https',
 
                       'auth_crowd_app_name': 'xyzzy'})
 

	
 
        test_url = base.url(controller='admin/auth_settings',
 
                       action='auth_settings')
 

	
 
        response = self.app.post(url=test_url, params=params)
 
        self.checkSessionFlash(response, 'Auth settings updated successfully')
 

	
 
        new_settings = Setting.get_auth_settings()
 
        assert new_settings['auth_crowd_host'] == u'hostname', 'fail db write compare'
 
        assert new_settings['auth_crowd_host'] == 'hostname', 'fail db write compare'
 

	
 
    @base.skipif(not base.pam_lib_installed, reason='skipping due to missing pam lib')
 
    def test_pam_save_settings(self):
 
        self.log_user()
 

	
 
        params = self._enable_plugins('kallithea.lib.auth_modules.auth_internal,kallithea.lib.auth_modules.auth_pam')
 
        params.update({'auth_pam_service': 'kallithea',
 
                       'auth_pam_gecos': '^foo-.*'})
 

	
 
        test_url = base.url(controller='admin/auth_settings',
 
                       action='auth_settings')
 

	
 
        response = self.app.post(url=test_url, params=params)
 
        self.checkSessionFlash(response, 'Auth settings updated successfully')
 

	
 
        new_settings = Setting.get_auth_settings()
 
        assert new_settings['auth_pam_service'] == u'kallithea', 'fail db write compare'
 
        assert new_settings['auth_pam_service'] == 'kallithea', 'fail db write compare'
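
These auth tests rely on the optional-dependency probes defined in kallithea/tests/base.py (ldap_lib_installed, pam_lib_installed) together with base.skipif, so a missing python-ldap or python-pam library skips the tests instead of failing them. A sketch of the same guard pattern for a hypothetical optional backend (module name and attribute are assumptions):

    import pytest

    try:
        import somelib
        somelib.__version__          # touch an attribute to be sure the import is usable
        somelib_installed = True
    except ImportError:
        somelib_installed = False

    @pytest.mark.skipif(not somelib_installed, reason='skipping due to missing somelib')
    def test_needs_somelib():
        pass
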
kallithea/tests/functional/test_admin_gists.py
 
from kallithea.model.db import Gist, User
 
from kallithea.model.gist import GistModel
 
from kallithea.model.meta import Session
 
from kallithea.tests import base
 

	
 

	
 
def _create_gist(f_name, content='some gist', lifetime=-1,
 
                 description=u'gist-desc', gist_type='public',
 
                 description='gist-desc', gist_type='public',
 
                 owner=base.TEST_USER_ADMIN_LOGIN):
 
    gist_mapping = {
 
        f_name: {'content': content}
 
    }
 
    owner = User.get_by_username(owner)
 
    gist = GistModel().create(description, owner=owner, ip_addr=base.IP_ADDR,
 
                       gist_mapping=gist_mapping, gist_type=gist_type,
 
                       lifetime=lifetime)
 
    Session().commit()
 
    return gist
 

	
 

	
 
class TestGistsController(base.TestController):
 

	
 
    def teardown_method(self, method):
 
        for g in Gist.query():
 
            GistModel().delete(g)
 
        Session().commit()
 

	
 
    def test_index(self):
 
        self.log_user()
 
        response = self.app.get(base.url('gists'))
 
        # Test response...
 
        response.mustcontain('There are no gists yet')
 

	
 
        g1 = _create_gist('gist1').gist_access_id
 
        g2 = _create_gist('gist2', lifetime=1400).gist_access_id
 
        g3 = _create_gist('gist3', description=u'gist3-desc').gist_access_id
 
        g3 = _create_gist('gist3', description='gist3-desc').gist_access_id
 
        g4 = _create_gist('gist4', gist_type='private').gist_access_id
 
        response = self.app.get(base.url('gists'))
 
        # Test response...
 
        response.mustcontain('gist: %s' % g1)
 
        response.mustcontain('gist: %s' % g2)
 
        response.mustcontain('Expires: in 23 hours')  # we don't care about the end
 
        response.mustcontain('gist: %s' % g3)
 
        response.mustcontain('gist3-desc')
 
        response.mustcontain(no=['gist: %s' % g4])
 

	
 
    def test_index_private_gists(self):
 
        self.log_user()
 
        gist = _create_gist('gist5', gist_type='private')
 
        response = self.app.get(base.url('gists', private=1))
 
        # Test response...
 

	
 
        # and privates
 
        response.mustcontain('gist: %s' % gist.gist_access_id)
 

	
 
    def test_create_missing_description(self):
 
        self.log_user()
 
        response = self.app.post(base.url('gists'),
 
                                 params={'lifetime': -1, '_session_csrf_secret_token': self.session_csrf_secret_token()},
 
                                 status=200)
 

	
 
        response.mustcontain('Missing value')
 

	
 
    def test_create(self):
 
        self.log_user()
 
        response = self.app.post(base.url('gists'),
 
                                 params={'lifetime': -1,
 
                                         'content': 'gist test',
 
                                         'filename': 'foo',
 
                                         'public': 'public',
 
                                         '_session_csrf_secret_token': self.session_csrf_secret_token()},
 
                                 status=302)
 
        response = response.follow()
 
        response.mustcontain('added file: foo')
 
        response.mustcontain('gist test')
 
        response.mustcontain('<div class="label label-success">Public Gist</div>')
 

	
 
    def test_create_with_path_with_dirs(self):
 
        self.log_user()
 
        response = self.app.post(base.url('gists'),
 
                                 params={'lifetime': -1,
 
                                         'content': 'gist test',
 
                                         'filename': '/home/foo',
 
                                         'public': 'public',
kallithea/tests/functional/test_admin_repo_groups.py
 
from kallithea.model.meta import Session
 
from kallithea.model.repo_group import RepoGroupModel
 
from kallithea.tests.base import TestController, url
 
from kallithea.tests.fixture import Fixture
 

	
 

	
 
fixture = Fixture()
 

	
 

	
 
class TestRepoGroupsController(TestController):
 

	
 
    def test_case_insensitivity(self):
 
        self.log_user()
 
        group_name = u'newgroup'
 
        group_name = 'newgroup'
 
        response = self.app.post(url('repos_groups'),
 
                                 fixture._get_repo_group_create_params(group_name=group_name,
 
                                                                 _session_csrf_secret_token=self.session_csrf_secret_token()))
 
        # try to create repo group with swapped case
 
        swapped_group_name = group_name.swapcase()
 
        response = self.app.post(url('repos_groups'),
 
                                 fixture._get_repo_group_create_params(group_name=swapped_group_name,
 
                                                                 _session_csrf_secret_token=self.session_csrf_secret_token()))
 
        response.mustcontain('already exists')
 

	
 
        RepoGroupModel().delete(group_name)
 
        Session().commit()

Changeset was too big and was cut off...
