Changeset - 12b47803189f
[Not reviewed]
stable
0 21 0
Søren Løvborg - 10 years ago 2015-09-11 13:00:47
sorenl@unity3d.com
cleanup: use example.com for tests and examples

example.com is explicitly reserved for this purpose. Using that means
we won't accidentally hammer a real server or real email address if an
example value escapes into the wild, e.g. in an automated test.

The domain "kallithea.example.com" has been used throughout to refer to the
example Kallithea server.
12 files changed:
0 comments (0 inline, 0 general)
development.ini
 
################################################################################
 
################################################################################
 
# Kallithea - Development config:                                              #
 
# listening on *:5000                                                          #
 
# sqlite and kallithea.db                                                      #
 
# initial_repo_scan = true                                                     #
 
# set debug = true                                                             #
 
# verbose and colorful logging                                                 #
 
#                                                                              #
 
# The %(here)s variable will be replaced with the parent directory of this file#
 
################################################################################
 
################################################################################
 

	
 
[DEFAULT]
 
debug = true
 
pdebug = false
 

	
 
################################################################################
 
## Email settings                                                             ##
 
##                                                                            ##
 
## Refer to the documentation ("Email settings") for more details.            ##
 
##                                                                            ##
 
## It is recommended to use a valid sender address that passes access         ##
 
## validation and spam filtering in mail servers.                             ##
 
################################################################################
 

	
 
## 'From' header for application emails. You can optionally add a name.
 
## Default:
 
#app_email_from = Kallithea
 
## Examples:
 
#app_email_from = Kallithea <kallithea-noreply@example.com>
 
#app_email_from = kallithea-noreply@example.com
 

	
 
## Subject prefix for application emails.
 
## A space between this prefix and the real subject is automatically added.
 
## Default:
 
#email_prefix =
 
## Example:
 
#email_prefix = [Kallithea]
 

	
 
## Recipients for error emails and fallback recipients of application mails.
 
## Multiple addresses can be specified, space-separated.
 
## Only addresses are allowed, do not add any name part.
 
## Default:
 
#email_to =
 
## Examples:
 
#email_to = admin@example.com
 
#email_to = admin@example.com another_admin@example.com
 

	
 
## 'From' header for error emails. You can optionally add a name.
 
## Default:
 
#error_email_from = pylons@yourapp.com
 
## Examples:
 
#error_email_from = Kallithea Errors <kallithea-noreply@example.com>
 
#error_email_from = paste_error@example.com
 

	
 
## SMTP server settings
 
## Only smtp_server is mandatory. All other settings take the specified default
 
## values.
 
#smtp_server = mail.server.com
 
#smtp_server = smtp.example.com
 
#smtp_username =
 
#smtp_password =
 
#smtp_port = 25
 
#smtp_use_tls = false
 
#smtp_use_ssl = false
 
## SMTP authentication parameters to use (e.g. LOGIN PLAIN CRAM-MD5, etc.).
 
## If empty, use any of the authentication parameters supported by the server.
 
#smtp_auth =
 

	
 
[server:main]
 
## PASTE ##
 
#use = egg:Paste#http
 
## nr of worker threads to spawn
 
#threadpool_workers = 5
 
## max requests before thread respawn
 
#threadpool_max_requests = 10
 
## option to use threads instead of processes
 
#use_threadpool = true
 

	
 
## WAITRESS ##
 
use = egg:waitress#main
 
## number of worker threads
 
threads = 5
 
## MAX BODY SIZE 100GB
 
max_request_body_size = 107374182400
 
## use poll instead of select, fixes fd limits, may not work on old
 
## windows systems.
 
#asyncore_use_poll = True
 

	
 
## GUNICORN ##
 
#use = egg:gunicorn#main
 
## number of process workers. You must set `instance_id = *` when this option
 
## is set to more than one worker
 
#workers = 1
 
## process name
 
#proc_name = kallithea
 
## type of worker class, one of sync, eventlet, gevent, tornado
 
## for bigger setups, using a worker class other than sync is recommended
 
#worker_class = sync
 
#max_requests = 1000
 
## amount of time a worker can handle a request before it gets killed and
 
## restarted
 
#timeout = 3600
 

	
 
## UWSGI ##
 
## run with uwsgi --ini-paste-logged <inifile.ini>
 
#[uwsgi]
 
#socket = /tmp/uwsgi.sock
 
#master = true
 
#http = 127.0.0.1:5000
 

	
 
## run as a daemon and redirect all output to a file
 
#daemonize = ./uwsgi_kallithea.log
 

	
 
## master process PID
 
#pidfile = ./uwsgi_kallithea.pid
 

	
 
## stats server with workers statistics, use uwsgitop
 
## for monitoring, `uwsgitop 127.0.0.1:1717`
 
#stats = 127.0.0.1:1717
 
#memory-report = true
 

	
 
## log 5XX errors
 
#log-5xx = true
 

	
 
## Set the socket listen queue size.
 
#listen = 256
 

	
 
## Gracefully reload workers after the specified number of managed requests
 
## (avoid memory leaks).
 
#max-requests = 1000
 

	
 
## enable large buffers
 
#buffer-size = 65535
 

	
 
## socket and http timeouts ##
 
#http-timeout = 3600
 
#socket-timeout = 3600
 

	
 
## Log requests slower than the specified number of milliseconds.
 
#log-slow = 10
 

	
 
## Exit if no app can be loaded.
 
#need-app = true
 

	
 
## Set lazy mode (load apps in workers instead of master).
 
#lazy = true
 

	
 
## scaling ##
 
## set the cheaper algorithm to use; if not set, the default will be used
 
#cheaper-algo = spare
 

	
 
## minimum number of workers to keep at all times
 
#cheaper = 1
 

	
 
## number of workers to spawn at startup
 
#cheaper-initial = 1
 

	
 
## maximum number of workers that can be spawned
 
#workers = 4
 

	
 
## how many workers should be spawned at a time
 
#cheaper-step = 1
 

	
 
## COMMON ##
 
host = 0.0.0.0
 
port = 5000
 

	
 
## middleware for hosting the WSGI application under a URL prefix
 
#[filter:proxy-prefix]
 
#use = egg:PasteDeploy#prefix
 
#prefix = /<your-prefix>
 

	
 
[app:main]
 
use = egg:kallithea
 
## enable proxy prefix middleware
 
#filter-with = proxy-prefix
 

	
 
full_stack = true
 
static_files = true
 
## Available Languages:
 
## cs de fr hu ja nl_BE pl pt_BR ru sk zh_CN zh_TW
 
lang =
 
cache_dir = %(here)s/data
 
index_dir = %(here)s/data/index
 

	
 
## perform a full repository scan on each server start, this should be
 
## set to false after first startup, to allow faster server restarts.
 
#initial_repo_scan = false
 
initial_repo_scan = true
 

	
 
## uncomment and set this path to use archive download cache
 
archive_cache_dir = %(here)s/tarballcache
 

	
 
## change this to unique ID for security
 
app_instance_uuid = development-not-secret
 

	
 
## cut off limit for large diffs (size in bytes)
 
cut_off_limit = 256000
 

	
 
## use cached version of scm repo everywhere
 
vcs_full_cache = true
 

	
 
## force https in Kallithea, fixes https redirects, assumes it's always https
 
force_https = false
 

	
 
## use Strict-Transport-Security headers
 
use_htsts = false
 

	
 
## number of commits stats will parse on each iteration
 
commit_parse_limit = 25
 

	
 
## path to git executable
 
git_path = git
 

	
 
## git rev filter option, --all is the default filter, if you need to
 
## hide all refs in changelog switch this to --branches --tags
 
#git_rev_filter = --branches --tags
 

	
 
## RSS feed options
 
rss_cut_off_limit = 256000
 
rss_items_per_page = 10
 
rss_include_diff = false
 

	
 
## options for showing and identifying changesets
 
show_sha_length = 12
 
show_revision_number = false
 

	
 
## gist URL alias, used to create nicer urls for gist. This should be an
 
## url that does rewrites to _admin/gists/<gistid>.
 
## example: http://gist.kallithea.server/{gistid}. Empty means use the internal
 
## Kallithea url, ie. http[s]://your.kallithea.server/_admin/gists/<gistid>
 
## example: http://gist.example.com/{gistid}. Empty means use the internal
 
## Kallithea url, ie. http[s]://kallithea.example.com/_admin/gists/<gistid>
 
gist_alias_url =
 

	
 
## whitelist of API-enabled controllers. This allows adding a list of

## controllers to which access will be enabled by api_key. E.g. to enable

## API access to raw_files put `FilesController:raw`, to enable access to patches

## add `ChangesetController:changeset_patch`. This list should be "," separated.
 
## Syntax is <ControllerClass>:<function>. Check debug logs for generated names
 
## Recommended settings below are commented out:
 
api_access_controllers_whitelist =
 
#    ChangesetController:changeset_patch,
 
#    ChangesetController:changeset_raw,
 
#    FilesController:raw,
 
#    FilesController:archivefile
 

	
 
## default encoding used to convert from and to unicode
 
## can also be a comma separated list of encodings in case of mixed encodings
 
default_encoding = utf8
 

	
 
## issue tracker for Kallithea (leave blank to disable, absent for default)
 
#bugtracker = https://bitbucket.org/conservancy/kallithea/issues
 

	
 
## issue tracking mapping for commits messages
 
## comment out issue_pat, issue_server, issue_prefix to enable
 

	
 
## pattern to get the issues from commit messages
 
## default one used here is #<numbers> with a regex passive group for `#`
 
## {id} will be all groups matched from this pattern
 

	
 
issue_pat = (?:\s*#)(\d+)
 

	
 
## server url to the issue, each {id} will be replaced with match
 
## fetched from the regex, and {repo} is replaced with the full repository name

## including groups. {repo_name} is replaced with just the name of the repo
 

	
 
issue_server_link = https://myissueserver.com/{repo}/issue/{id}
 
issue_server_link = https://issues.example.com/{repo}/issue/{id}
 

	
 
## prefix to add to the link to indicate it's a URL
 
## #314 will be replaced by <issue_prefix><id>
 

	
 
issue_prefix = #
 

	
 
## issue_pat, issue_server_link, issue_prefix can have suffixes to specify
 
## multiple patterns, for other issue servers, wikis or other trackers

## below is an example of how to create a wiki pattern
 
# wiki-some-id -> https://mywiki.com/some-id
 
# wiki-some-id -> https://wiki.example.com/some-id
 

	
 
#issue_pat_wiki = (?:wiki-)(.+)
 
#issue_server_link_wiki = https://mywiki.com/{id}
 
#issue_server_link_wiki = https://wiki.example.com/{id}
 
#issue_prefix_wiki = WIKI-
 

	
 
## instance-id prefix
 
## a prefix key for this instance used for cache invalidation when running
 
## multiple instances of kallithea; make sure it's globally unique for
 
## all running kallithea instances. Leave empty if you don't use it
 
instance_id =
 

	
 
## alternative HTTP response code for failed authentication. Default HTTP
 
## response is 401 HTTPUnauthorized. Currently Mercurial clients have trouble with
 
## handling that. Set this variable to 403 to return HTTPForbidden
 
auth_ret_code =
 

	
 
## locking return code. When a repository is locked, return this HTTP code. 2XX
 
## codes don't break the transactions while 4XX codes do
 
lock_ret_code = 423
 

	
 
## allow changing the repository location on the settings page
 
allow_repo_location_change = True
 

	
 
## allow setting up custom hooks on the settings page
 
allow_custom_hooks_settings = True
 

	
 
####################################
 
###        CELERY CONFIG        ####
 
####################################
 

	
 
use_celery = false
 
broker.host = localhost
 
broker.vhost = rabbitmqhost
 
broker.port = 5672
 
broker.user = rabbitmq
 
broker.password = qweqwe
 

	
 
celery.imports = kallithea.lib.celerylib.tasks
 

	
 
celery.result.backend = amqp
 
celery.result.dburi = amqp://
 
celery.result.serialier = json
 

	
 
#celery.send.task.error.emails = true
 
#celery.amqp.task.result.expires = 18000
 

	
 
celeryd.concurrency = 2
 
#celeryd.log.file = celeryd.log
 
celeryd.log.level = DEBUG
 
celeryd.max.tasks.per.child = 1
 

	
 
## tasks will never be sent to the queue, but executed locally instead.
 
celery.always.eager = false
 

	
 
####################################
 
###         BEAKER CACHE        ####
 
####################################
 

	
 
beaker.cache.data_dir = %(here)s/data/cache/data
 
beaker.cache.lock_dir = %(here)s/data/cache/lock
 

	
 
beaker.cache.regions = short_term,long_term,sql_cache_short
 

	
 
beaker.cache.short_term.type = memory
 
beaker.cache.short_term.expire = 60
 
beaker.cache.short_term.key_length = 256
 

	
 
beaker.cache.long_term.type = memory
 
beaker.cache.long_term.expire = 36000
 
beaker.cache.long_term.key_length = 256
 

	
 
beaker.cache.sql_cache_short.type = memory
 
beaker.cache.sql_cache_short.expire = 10
 
beaker.cache.sql_cache_short.key_length = 256
 

	
 
####################################
 
###       BEAKER SESSION        ####
 
####################################
 

	
 
## Name of session cookie. Should be unique for a given host and path, even when running
 
## on different ports. Otherwise, cookie sessions will be shared and messed up.
 
beaker.session.key = kallithea
 
## Sessions should always only be accessible by the browser, not directly by JavaScript.
 
beaker.session.httponly = true
 
## Session lifetime. 2592000 seconds is 30 days.
 
beaker.session.timeout = 2592000
 

	
 
## Server secret used with HMAC to ensure integrity of cookies.
 
beaker.session.secret = development-not-secret
 
## Further, encrypt the data with AES.
 
#beaker.session.encrypt_key = <key_for_encryption>
 
#beaker.session.validate_key = <validation_key>
 

	
 
## Type of storage used for the session, current types are
 
## dbm, file, memcached, database, and memory.
 

	
 
## File system storage of session data. (default)
 
#beaker.session.type = file
 

	
 
## Cookie only, store all session data inside the cookie. Requires secure secrets.
 
#beaker.session.type = cookie
 

	
 
## Database storage of session data.
 
#beaker.session.type = ext:database
 
#beaker.session.sa.url = postgresql://postgres:qwe@localhost/kallithea
 
#beaker.session.table_name = db_session
 

	
 
############################
 
## ERROR HANDLING SYSTEMS ##
 
############################
 

	
 
####################
 
### [errormator] ###
 
####################
 

	
 
## Errormator is tailored to work with Kallithea, see
 
## http://errormator.com for details on how to obtain an account.

## You must install the python package `errormator_client` to make it work.
 

	
 
## errormator enabled
 
errormator = false
 

	
 
errormator.server_url = https://api.errormator.com
 
errormator.api_key = YOUR_API_KEY
 

	
 
## TWEAK AMOUNT OF INFO SENT HERE
 

	
 
## enables 404 error logging (default False)
 
errormator.report_404 = false
 

	
 
## time in seconds after request is considered being slow (default 1)
 
errormator.slow_request_time = 1
 

	
 
## record slow requests in application
 
## (needs to be enabled for slow datastore recording and time tracking)
 
errormator.slow_requests = true
 

	
 
## enable hooking to application loggers
 
#errormator.logging = true
 

	
 
## minimum log level for log capture
 
#errormator.logging.level = WARNING
 

	
 
## send logs only from erroneous/slow requests
 
## (saves API quota for intensive logging)
 
errormator.logging_on_error = false
 

	
 
## list of additional keywords that should be grabbed from the environ object;

## can be a string with a comma separated list of words in lowercase

## (by default the client will always send the following info:

## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that

## start with HTTP*). This list can be extended with additional keywords here.
 
errormator.environ_keys_whitelist =
 

	
 
## list of keywords that should be blanked from the request object;

## can be a string with a comma separated list of words in lowercase

## (by default the client will always blank keys that contain the following words:

## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf';

## this list can be extended with additional keywords set here)
 
errormator.request_keys_blacklist =
 

	
 
## list of namespaces that should be ignored when gathering log entries;

## can be a string with a comma separated list of namespaces
 
## (by default the client ignores own entries: errormator_client.client)
 
errormator.log_namespace_blacklist =
 

	
 
################
 
### [sentry] ###
 
################
 

	
 
## sentry is an alternative open source error aggregator

## you must install the python packages `sentry` and `raven` to enable it
 

	
 
sentry.dsn = YOUR_DSN
 
sentry.servers =
 
sentry.name =
 
sentry.key =
 
sentry.public_key =
 
sentry.secret_key =
 
sentry.project =
 
sentry.site =
 
sentry.include_paths =
 
sentry.exclude_paths =
 

	
 
################################################################################
 
## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED IN A PRODUCTION ENVIRONMENT*  ##
 
## Debug mode will enable the interactive debugging tool, allowing ANYONE to  ##
 
## execute malicious code after an exception is raised.                       ##
 
################################################################################
 
#set debug = false
 
set debug = true
 

	
 
##################################
 
###       LOGVIEW CONFIG       ###
 
##################################
 

	
 
logview.sqlalchemy = #faa
 
logview.pylons.templating = #bfb
 
logview.pylons.util = #eee
 

	
 
#########################################################
 
### DB CONFIGS - EACH DB WILL HAVE ITS OWN CONFIG     ###
 
#########################################################
 

	
 
# SQLITE [default]
 
sqlalchemy.db1.url = sqlite:///%(here)s/kallithea.db?timeout=60
 

	
 
# POSTGRESQL
 
#sqlalchemy.db1.url = postgresql://user:pass@localhost/kallithea
 

	
 
# MySQL
 
#sqlalchemy.db1.url = mysql://user:pass@localhost/kallithea
 

	
 
# see sqlalchemy docs for others
 

	
 
sqlalchemy.db1.echo = false
 
sqlalchemy.db1.pool_recycle = 3600
 
sqlalchemy.db1.convert_unicode = true
 

	
 
################################
 
### LOGGING CONFIGURATION   ####
 
################################
 

	
 
[loggers]
 
keys = root, routes, kallithea, sqlalchemy, beaker, templates, whoosh_indexer
 

	
 
[handlers]
 
keys = console, console_sql
 

	
 
[formatters]
 
keys = generic, color_formatter, color_formatter_sql
 

	
 
#############
 
## LOGGERS ##
 
#############
 

	
 
[logger_root]
 
level = NOTSET
 
handlers = console
 

	
 
[logger_routes]
 
level = DEBUG
 
handlers =
 
qualname = routes.middleware
 
## "level = DEBUG" logs the route matched and routing variables.
 
propagate = 1
 

	
 
[logger_beaker]
 
level = DEBUG
 
handlers =
 
qualname = beaker.container
 
propagate = 1
 

	
 
[logger_templates]
 
level = INFO
 
handlers =
 
qualname = pylons.templating
 
propagate = 1
 

	
 
[logger_kallithea]
 
level = DEBUG
 
handlers =
 
qualname = kallithea
 
propagate = 1
 

	
 
[logger_sqlalchemy]
 
level = INFO
 
handlers = console_sql
 
qualname = sqlalchemy.engine
 
propagate = 0
 

	
 
[logger_whoosh_indexer]
 
level = DEBUG
 
handlers =
 
qualname = whoosh_indexer
 
propagate = 1
 

	
 
##############
 
## HANDLERS ##
 
##############
 

	
 
[handler_console]
 
class = StreamHandler
 
args = (sys.stderr,)
 
#level = INFO
 
#formatter = generic
 
level = DEBUG
 
formatter = color_formatter
 

	
 
[handler_console_sql]
 
class = StreamHandler
 
args = (sys.stderr,)
 
#level = WARN
 
#formatter = generic
 
level = DEBUG
 
formatter = color_formatter_sql
 

	
 
################
 
## FORMATTERS ##
 
################
 

	
 
[formatter_generic]
 
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
 

	
 
[formatter_color_formatter]
 
class = kallithea.lib.colored_formatter.ColorFormatter
 
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
 

	
 
[formatter_color_formatter_sql]
 
class = kallithea.lib.colored_formatter.ColorFormatterSql
 
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
docs/api/api.rst
 
.. _api:
 

	
 
===
 
API
 
===
 

	
 
Kallithea has a simple JSON RPC API with a single schema for calling all API
 
methods. Everything is available by sending JSON encoded http(s) requests to
 
``<your_server>/_admin/api``.
 

	
 

	
 
API access for web views
 
++++++++++++++++++++++++
 

	
 
API access can also be turned on for each web view in Kallithea that is
 
decorated with the ``@LoginRequired`` decorator. Some views use
 
``@LoginRequired(api_access=True)`` and are always available. By default only
 
RSS/Atom feed views are enabled. Other views are
 
only available if they have been whitelisted. Edit the
 
``api_access_controllers_whitelist`` option in your .ini file and define views
 
that should have API access enabled.
 

	
 
For example, to enable API access to patch/diff, raw file and archive::
 

	
 
    api_access_controllers_whitelist =
 
        ChangesetController:changeset_patch,
 
        ChangesetController:changeset_raw,
 
        FilesController:raw,
 
        FilesController:archivefile
 

	
 
After this change, a Kallithea view can be accessed without login by adding a
 
GET parameter ``?api_key=<api_key>`` to the URL.
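For example, if ``FilesController:raw`` has been whitelisted as above, a raw
file could then be fetched anonymously with a URL of roughly this form (a
sketch; the exact path depends on the view and on your repository layout)::

    https://kallithea.example.com/<reponame>/raw/<revision>/<path/to/file>?api_key=<api_key>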
 

	
 
Exposing raw diffs is a good way to integrate with
 
third-party services like code review, or build farms that can download archives.
 

	
 

	
 
API access
 
++++++++++
 

	
 
Clients must send JSON encoded JSON-RPC requests::
 

	
 
    {
 
        "id: "<id>",
 
        "api_key": "<api_key>",
 
        "method": "<method_name>",
 
        "args": {"<arg_key>": "<arg_val>"}
 
    }
 

	
 
For example, to pull to a local "CPython" mirror using curl::
 

	
 
    curl https://example.com/_admin/api -X POST -H 'content-type:text/plain' \
 
    curl https://kallithea.example.com/_admin/api -X POST -H 'content-type:text/plain' \
 
        --data-binary '{"id":1,"api_key":"xe7cdb2v278e4evbdf5vs04v832v0efvcbcve4a3","method":"pull","args":{"repo":"CPython"}}'
 

	
 
In general, provide
 
 - *id*, a value of any type, can be used to match the response with the request that it is replying to.
 
 - *api_key*, for authentication and permission validation.
 
 - *method*, the name of the method to call -- a list of available methods can be found below.
 
 - *args*, the arguments to pass to the method.
 

	
 
.. note::
 

	
 
    api_key can be found or set on the user account page.
 

	
 
The response to the JSON-RPC API call will always be a JSON structure::
 

	
 
    {
 
        "id": <id>,  # the id that was used in the request
 
        "result": <result>|null,  # JSON formatted result (null on error)
 
        "error": null|<error_message>  # JSON formatted error (null on success)
 
    }
 

	
 
All responses from the API will be ``HTTP/1.0 200 OK``. If an error occurs,
 
the response will have a failure description in *error* and
 
*result* will be null.
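The same request/response cycle can also be scripted. Below is a minimal
sketch using Python and the ``requests`` library; the URL, API key and
repository name are placeholders and mirror the curl example above::

    import json
    import requests

    # Placeholders: point these at your own Kallithea instance and API key.
    url = "https://kallithea.example.com/_admin/api"
    payload = {
        "id": 1,
        "api_key": "<api_key>",
        "method": "pull",
        "args": {"repo": "CPython"},
    }

    # The API expects the JSON-RPC request as the raw request body.
    response = requests.post(url, data=json.dumps(payload),
                             headers={"content-type": "text/plain"})
    reply = response.json()

    # Per the response structure above: "error" is null on success,
    # and "result" is null on error.
    if reply["error"] is not None:
        raise RuntimeError(reply["error"])
    print(reply["result"])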
 

	
 

	
 
API client
 
++++++++++
 

	
 
Kallithea comes with a ``kallithea-api`` command line tool, providing a convenient
 
way to call the JSON-RPC API.
 

	
 
For example, to call ``get_repo``::
 

	
 
    kallithea-api --apihost=<Kallithea URL> --apikey=<API key> get_repo
 

	
 
    Calling method get_repo => <Kallithea URL>
 
    Server response
 
    ERROR:"Missing non optional `repoid` arg in JSON DATA"
 

	
 
Oops, looks like we forgot to add an argument. Let's try again, now
 
providing the ``repoid`` as a parameter::
 

	
 
    kallithea-api --apihost=<Kallithea URL> --apikey=<API key> get_repo repoid:myrepo
 

	
 
    Calling method get_repo => <Kallithea URL>
 
    Server response
 
    {
 
        "clone_uri": null,
 
        "created_on": "2015-08-31T14:55:19.042",
 
    ...
 

	
 
To avoid specifying ``apihost`` and ``apikey`` every time, run::
 

	
 
    kallithea-api --save-config --apihost=<Kallithea URL> --apikey=<API key>
 

	
 
This will create a ``~/.config/kallithea`` file with the specified URL and API key
 
so you don't have to specify them every time.
 

	
 

	
 
API methods
 
+++++++++++
 

	
 

	
 
pull
 
----
 

	
 
Pull the given repo from its remote location. Can be used to automatically keep
 
remote repos up to date.
 
This command can only be executed using the api_key of a user with admin rights.
 

	
 
INPUT::
 

	
 
    id : <id_for_response>
 
    api_key : "<api_key>"
 
    method :  "pull"
 
    args :    {
 
                "repoid" : "<reponame or repo_id>"
 
              }
 

	
 
OUTPUT::
 

	
 
    id : <id_given_in_input>
 
    result : "Pulled from `<reponame>`"
 
    error :  null
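Using the ``kallithea-api`` client described above, the same call could be
made as in this sketch (the repository name is a placeholder)::

    kallithea-api pull repoid:CPython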
 

	
 
rescan_repos
 
------------
 

	
 
Rescan repositories. If ``remove_obsolete`` is set,
 
Kallithea will delete repos that are in the database but not in the filesystem.
 
This command can only be executed using the api_key of a user with admin rights.
 

	
 
INPUT::
 

	
 
    id : <id_for_response>
 
    api_key : "<api_key>"
 
    method :  "rescan_repos"
 
    args :    {
 
                "remove_obsolete" : "<boolean = Optional(False)>"
 
              }
 

	
 
OUTPUT::
 

	
 
    id : <id_given_in_input>
 
    result : "{'added': [<list of names of added repos>],
 
               'removed': [<list of names of removed repos>]}"
 
    error :  null
 

	
 
invalidate_cache
 
----------------
 

	
 
Invalidate the cache for a repository.
 
This command can only be executed using the api_key of a user with admin rights,
 
or that of a regular user with admin or write access to the repository.
 

	
 
INPUT::
 

	
 
    id : <id_for_response>
 
    api_key : "<api_key>"
 
    method :  "invalidate_cache"
 
    args :    {
 
                "repoid" : "<reponame or repo_id>"
 
              }
 

	
 
OUTPUT::
 

	
 
    id : <id_given_in_input>
 
    result : "Caches of repository `<reponame>`"
 
    error :  null
 

	
 
lock
 
----
 

	
 
Set the locking state on the given repository by the given user.
 
If the param ``userid`` is skipped, it is set to the ID of the user who is calling this method.
 
If param ``locked`` is skipped, the current lock state of the repository is returned.
 
This command can only be executed using the api_key of a user with admin rights, or that of a regular user with admin or write access to the repository.
 

	
 
INPUT::
 

	
 
    id : <id_for_response>
 
    api_key : "<api_key>"
 
    method :  "lock"
 
    args :    {
 
                "repoid" : "<reponame or repo_id>"
 
                "userid" : "<user_id or username = Optional(=apiuser)>",
 
                "locked" : "<bool true|false = Optional(=None)>"
 
              }
 

	
 
OUTPUT::
 

	
 
    id : <id_given_in_input>
 
    result : {
 
                 "repo": "<reponame>",
 
                 "locked": "<bool true|false>",
 
                 "locked_since": "<float lock_time>",
 
                 "locked_by": "<username>",
 
                 "msg": "User `<username>` set lock state for repo `<reponame>` to `<false|true>`"
 
             }
 
    error :  null
 

	
 
get_ip
 
------
 

	
 
Return the IP address as seen from the Kallithea server, together with all

defined IP addresses for the given user.
 
This command can only be executed using the api_key of a user with admin rights.
 

	
 
INPUT::
 

	
 
    id : <id_for_response>
 
    api_key : "<api_key>"
 
    method :  "get_ip"
 
    args :    {
 
                "userid" : "<user_id or username>",
 
              }
 

	
 
OUTPUT::
 

	
 
    id : <id_given_in_input>
 
    result : {
 
                 "ip_addr_server": <ip_from_clien>",
 
                 "user_ips": [
 
                                {
 
                                   "ip_addr": "<ip_with_mask>",
 
                                   "ip_range": ["<start_ip>", "<end_ip>"],
 
                                },
 
                                ...
 
                             ]
 
             }
 

	
 
    error :  null
 

	
 
get_user
 
--------
 

	
 
Get a user by username or userid. The result is empty if the user can't be found.

If the userid param is skipped, it is set to the ID of the user who is calling this method.

Any userid can be specified when the command is executed using the api_key of a user with admin rights.

Regular users can only specify their own userid.
 

	
 
INPUT::
 

	
 
    id : <id_for_response>
 
    api_key : "<api_key>"
 
    method :  "get_user"
 
    args :    {
 
                "userid" : "<username or user_id Optional(=apiuser)>"
 
              }
 

	
 
OUTPUT::
 

	
 
    id : <id_given_in_input>
 
    result: None if user does not exist or
 
            {
 
                "user_id" :     "<user_id>",
 
                "api_key" :     "<api_key>",
 
                "username" :    "<username>",
 
                "firstname":    "<firstname>",
 
                "lastname" :    "<lastname>",
 
                "email" :       "<email>",
 
                "emails":       "<list_of_all_additional_emails>",
 
                "ip_addresses": "<list_of_ip_addresses_for_user>",
 
                "active" :      "<bool>",
 
                "admin" :       "<bool>",
 
                "ldap_dn" :     "<ldap_dn>",
 
                "last_login":   "<last_login>",
 
                "permissions": {
 
                    "global": ["hg.create.repository",
 
                               "repository.read",
 
                               "hg.register.manual_activate"],
 
                    "repositories": {"repo1": "repository.none"},
 
                    "repositories_groups": {"Group1": "group.read"}
 
                 },
 
            }
 
    error:  null
 

	
 
get_users
 
---------
 

	
 
List all existing users.
 
This command can only be executed using the api_key of a user with admin rights.
 

	
 
INPUT::
 

	
 
    id : <id_for_response>
 
    api_key : "<api_key>"
 
    method :  "get_users"
 
    args :    { }
 

	
 
OUTPUT::
 

	
 
    id : <id_given_in_input>
 
    result: [
 
              {
 
                "user_id" :     "<user_id>",
 
                "api_key" :     "<api_key>",
 
                "username" :    "<username>",
 
                "firstname":    "<firstname>",
 
                "lastname" :    "<lastname>",
 
                "email" :       "<email>",
 
                "emails":       "<list_of_all_additional_emails>",
 
                "ip_addresses": "<list_of_ip_addresses_for_user>",
 
                "active" :      "<bool>",
 
                "admin" :       "<bool>",
 
                "ldap_dn" :     "<ldap_dn>",
 
                "last_login":   "<last_login>",
 
              },
 
 
            ]
 
    error:  null
 

	
 
.. _create-user:
 

	
 
create_user
 
-----------
 

	
 
Create a new user.
 
This command can only be executed using the api_key of a user with admin rights.
 

	
 
INPUT::
 

	
 
    id : <id_for_response>
 
    api_key : "<api_key>"
 
    method :  "create_user"
 
    args :    {
 
                "username" :  "<username>",
 
                "email" :     "<useremail>",
 
                "password" :  "<password = Optional(None)>",
 
                "firstname" : "<firstname> = Optional(None)",
 
                "lastname" :  "<lastname> = Optional(None)",
 
                "active" :    "<bool> = Optional(True)",
 
                "admin" :     "<bool> = Optional(False)",
 
                "ldap_dn" :   "<ldap_dn> = Optional(None)"
 
              }
 

	
 
OUTPUT::
 

	
 
    id : <id_given_in_input>
 
    result: {
 
              "msg" : "created new user `<username>`",
 
              "user": {
 
                "user_id" :  "<user_id>",
 
                "username" : "<username>",
 
                "firstname": "<firstname>",
 
                "lastname" : "<lastname>",
 
                "email" :    "<email>",
 
                "emails":    "<list_of_all_additional_emails>",
 
                "active" :   "<bool>",
 
                "admin" :    "<bool>",
 
                "ldap_dn" :  "<ldap_dn>",
 
                "last_login": "<last_login>",
 
              },
 
            }
 
    error:  null
 

	
 
Example::
 

	
 
    kallithea-api create_user username:bent email:bent@example.com firstname:Bent lastname:Bentsen extern_type:ldap extern_name:uid=bent,dc=example,dc=com
 

	
 
update_user
 
-----------
 

	
 
Update the given user if such a user exists.
 
This command can only be executed using the api_key of a user with admin rights.
 

	
 
INPUT::
 

	
 
    id : <id_for_response>
 
    api_key : "<api_key>"
 
    method :  "update_user"
 
    args :    {
 
                "userid" : "<user_id or username>",
 
                "username" :  "<username> = Optional(None)",
 
                "email" :     "<useremail> = Optional(None)",
 
                "password" :  "<password> = Optional(None)",
 
                "firstname" : "<firstname> = Optional(None)",
 
                "lastname" :  "<lastname> = Optional(None)",
 
                "active" :    "<bool> = Optional(None)",
 
                "admin" :     "<bool> = Optional(None)",
 
                "ldap_dn" :   "<ldap_dn> = Optional(None)"
 
              }
 

	
 
OUTPUT::
 

	
 
    id : <id_given_in_input>
 
    result: {
 
              "msg" : "updated user ID:<userid> <username>",
 
              "user": {
 
                "user_id" :  "<user_id>",
 
                "api_key" :  "<api_key>",
 
                "username" : "<username>",
 
                "firstname": "<firstname>",
 
                "lastname" : "<lastname>",
 
                "email" :    "<email>",
 
                "emails":    "<list_of_all_additional_emails>",
 
                "active" :   "<bool>",
 
                "admin" :    "<bool>",
 
                "ldap_dn" :  "<ldap_dn>",
 
                "last_login": "<last_login>",
 
              },
 
            }
 
    error:  null
 

	
 
delete_user
 
-----------
 

	
 
Delete the given user if such a user exists.
 
This command can only be executed using the api_key of a user with admin rights.
 

	
 
INPUT::
 

	
 
    id : <id_for_response>
 
    api_key : "<api_key>"
 
    method :  "delete_user"
 
    args :    {
 
                "userid" : "<user_id or username>",
 
              }
 

	
 
OUTPUT::
 

	
 
    id : <id_given_in_input>
 
    result: {
 
              "msg" : "deleted user ID:<userid> <username>",
 
              "user": null
 
            }
 
    error:  null
 

	
 
get_user_group
 
--------------
 

	
 
Get an existing user group.
 
This command can only be executed using the api_key of a user with admin rights.
 

	
 
INPUT::
 

	
 
    id : <id_for_response>
 
    api_key : "<api_key>"
 
    method :  "get_user_group"
 
    args :    {
 
                "usergroupid" : "<user group id or name>"
 
              }
 

	
 
OUTPUT::
 

	
 
    id : <id_given_in_input>
 
    result : None if group does not exist
 
             {
 
               "users_group_id" : "<id>",
 
               "group_name" :     "<groupname>",
 
               "active":          "<bool>",
 
               "members" :  [
 
                              {
 
                                "user_id" :  "<user_id>",
 
                                "api_key" :  "<api_key>",
 
                                "username" : "<username>",
 
                                "firstname": "<firstname>",
 
                                "lastname" : "<lastname>",
 
                                "email" :    "<email>",
 
                                "emails":    "<list_of_all_additional_emails>",
 
                                "active" :   "<bool>",
 
                                "admin" :    "<bool>",
 
                                "ldap_dn" :  "<ldap_dn>",
 
                                "last_login": "<last_login>",
 
                              },
 
 
                            ]
 
             }
 
    error : null
 

	
 
get_user_groups
 
---------------
 

	
 
List all existing user groups.
 
This command can only be executed using the api_key of a user with admin rights.
 

	
 
INPUT::
 

	
 
    id : <id_for_response>
 
    api_key : "<api_key>"
 
    method :  "get_user_groups"
 
    args :    { }
 

	
 
OUTPUT::
 

	
 
    id : <id_given_in_input>
 
    result : [
 
               {
 
               "users_group_id" : "<id>",
 
               "group_name" :     "<groupname>",
 
               "active":          "<bool>",
 
               },
 
 
              ]
 
    error : null
 

	
 
create_user_group
 
-----------------
 

	
 
Create a new user group.
 
This command can only be executed using the api_key of a user with admin rights.
 

	
 
INPUT::
 

	
 
    id : <id_for_response>
 
    api_key : "<api_key>"
 
    method :  "create_user_group"
 
    args:     {
 
                "group_name": "<groupname>",
 
                "owner" :     "<owner_name_or_id = Optional(=apiuser)>",
 
                "active":     "<bool> = Optional(True)"
 
              }
 

	
 
OUTPUT::
 

	
 
    id : <id_given_in_input>
 
    result: {
 
              "msg": "created new user group `<groupname>`",
 
              "users_group": {
 
                     "users_group_id" : "<id>",
 
                     "group_name" :     "<groupname>",
 
                     "active":          "<bool>",
 
               },
 
            }
 
    error:  null
 

	
 
add_user_to_user_group
 
----------------------
 

	
 
Add a user to a user group. If the user is already in that group, success will be
 
``false``.
 
This command can only be executed using the api_key of a user with admin rights.
 

	
 
INPUT::
 

	
 
    id : <id_for_response>
 
    api_key : "<api_key>"
 
    method :  "add_user_user_group"
 
    args:     {
 
                "usersgroupid" : "<user group id or name>",
 
                "userid" : "<user_id or username>",
 
              }
 

	
 
OUTPUT::
 

	
 
    id : <id_given_in_input>
 
    result: {
 
              "success": True|False # depends on if member is in group
 
              "msg": "added member `<username>` to a user group `<groupname>` |
 
                      User is already in that group"
 
            }
 
    error:  null
 

	
 
remove_user_from_user_group
 
---------------------------
 

	
 
Remove a user from a user group. If the user isn't in the given group, success will
 
be ``false``.
 
This command can only be executed using the api_key of a user with admin rights.
 

	
 
INPUT::
 

	
 
    id : <id_for_response>
 
    api_key : "<api_key>"
 
    method :  "remove_user_from_user_group"
 
    args:     {
 
                "usersgroupid" : "<user group id or name>",
 
                "userid" : "<user_id or username>",
 
              }
 

	
 
OUTPUT::
 

	
 
    id : <id_given_in_input>
 
    result: {
 
              "success":  True|False,  # depends on if member is in group
 
              "msg": "removed member <username> from user group <groupname> |
 
                      User wasn't in group"
 
            }
 
    error:  null
 

	
 
get_repo
 
--------
 

	
 
Get an existing repository by its name or repository_id. Members will contain
 
either users_group or users associated with that repository.
 
This command can only be executed using the api_key of a user with admin rights,
 
or that of a regular user with at least read access to the repository.
 

	
 
INPUT::
 

	
 
    id : <id_for_response>
 
    api_key : "<api_key>"
 
    method :  "get_repo"
 
    args:     {
 
                "repoid" : "<reponame or repo_id>"
 
              }
 

	
 
OUTPUT::
 

	
 
    id : <id_given_in_input>
 
    result: None if repository does not exist or
 
            {
 
                "repo_id" :          "<repo_id>",
 
                "repo_name" :        "<reponame>"
 
                "repo_type" :        "<repo_type>",
 
                "clone_uri" :        "<clone_uri>",
 
                "enable_downloads":  "<bool>",
 
                "enable_locking":    "<bool>",
 
                "enable_statistics": "<bool>",
 
                "private":           "<bool>",
 
                "created_on" :       "<date_time_created>",
 
                "description" :      "<description>",
 
                "landing_rev":       "<landing_rev>",
 
                "last_changeset":    {
 
                                       "author":   "<full_author>",
 
                                       "date":     "<date_time_of_commit>",
 
                                       "message":  "<commit_message>",
 
                                       "raw_id":   "<raw_id>",
 
                                       "revision": "<numeric_revision>",
 
                                       "short_id": "<short_id>"
 
                                     }
 
                "owner":             "<repo_owner>",
 
                "fork_of":           "<name_of_fork_parent>",
 
                "members" :     [
 
                                  {
 
                                    "type":        "user",
 
                                    "user_id" :    "<user_id>",
 
                                    "api_key" :    "<api_key>",
 
                                    "username" :   "<username>",
 
                                    "firstname":   "<firstname>",
 
                                    "lastname" :   "<lastname>",
 
                                    "email" :      "<email>",
 
                                    "emails":      "<list_of_all_additional_emails>",
 
                                    "active" :     "<bool>",
 
                                    "admin" :      "<bool>",
 
                                    "ldap_dn" :    "<ldap_dn>",
 
                                    "last_login":  "<last_login>",
 
                                    "permission" : "repository.(read|write|admin)"
 
                                  },
 
 
                                  {
 
                                    "type":      "users_group",
 
                                    "id" :       "<usersgroupid>",
 
                                    "name" :     "<usersgroupname>",
 
                                    "active":    "<bool>",
 
                                    "permission" : "repository.(read|write|admin)"
 
                                  },
 
 
                                ]
 
                 "followers":   [
 
                                  {
 
                                    "user_id" :     "<user_id>",
 
                                    "username" :    "<username>",
 
                                    "api_key" :     "<api_key>",
 
                                    "firstname":    "<firstname>",
 
                                    "lastname" :    "<lastname>",
 
                                    "email" :       "<email>",
 
                                    "emails":       "<list_of_all_additional_emails>",
 
                                    "ip_addresses": "<list_of_ip_addresses_for_user>",
 
                                    "active" :      "<bool>",
 
                                    "admin" :       "<bool>",
 
                                    "ldap_dn" :     "<ldap_dn>",
 
                                    "last_login":   "<last_login>",
 
                                  },
 
 
                 ]
 
            }
 
    error:  null
 

	
 
get_repos
 
---------
 

	
 
List all existing repositories.
 
This command can only be executed using the api_key of a user with admin rights,
 
or that of a regular user with at least read access to the repository.
 

	
 
INPUT::
 

	
 
    id : <id_for_response>
 
    api_key : "<api_key>"
 
    method :  "get_repos"
 
    args:     { }
 

	
 
OUTPUT::
 

	
 
    id : <id_given_in_input>
 
    result: [
 
              {
 
                "repo_id" :          "<repo_id>",
 
                "repo_name" :        "<reponame>"
 
                "repo_type" :        "<repo_type>",
 
                "clone_uri" :        "<clone_uri>",
 
                "private" :          "<bool>",
 
                "created_on" :       "<datetimecreated>",
 
                "description" :      "<description>",
 
                "landing_rev":       "<landing_rev>",
 
                "owner":             "<repo_owner>",
 
                "fork_of":           "<name_of_fork_parent>",
 
                "enable_downloads":  "<bool>",
 
                "enable_locking":    "<bool>",
 
                "enable_statistics": "<bool>",
 
              },
 
 
            ]
 
    error:  null
 

	
 
get_repo_nodes
 
--------------
 

	
 
Return a list of files and directories for a given path at the given revision.
 
It is possible to specify ret_type to show only ``files`` or ``dirs``.
 
This command can only be executed using the api_key of a user with admin rights.
 

	
 
INPUT::
 

	
 
    id : <id_for_response>
 
    api_key : "<api_key>"
 
    method :  "get_repo_nodes"
 
    args:     {
 
                "repoid" : "<reponame or repo_id>"
 
                "revision"  : "<revision>",
 
                "root_path" : "<root_path>",
 
                "ret_type"  : "<ret_type> = Optional('all')"
 
              }
 

	
 
OUTPUT::
 

	
 
    id : <id_given_in_input>
 
    result: [
 
              {
 
                "name" :        "<name>"
 
                "type" :        "<type>",
 
              },
 
 
            ]
 
    error:  null
 

	
 
create_repo
 
-----------
 

	
 
Create a repository. If the repository name contains "/", all needed repository
 
groups will be created. For example "foo/bar/baz" will create repository groups
 
"foo", "bar" (with "foo" as parent), and create "baz" repository with
 
"bar" as group.
 
This command can only be executed using the api_key of a user with admin rights,
 
or that of a regular user with create repository permission.
 
Regular users cannot specify owner parameter.
 

	
 
INPUT::
 

	
 
    id : <id_for_response>
 
    api_key : "<api_key>"
 
    method :  "create_repo"
 
    args:     {
 
                "repo_name" :        "<reponame>",
 
                "owner" :            "<owner_name_or_id = Optional(=apiuser)>",
 
                "repo_type" :        "<repo_type> = Optional('hg')",
 
                "description" :      "<description> = Optional('')",
 
                "private" :          "<bool> = Optional(False)",
 
                "clone_uri" :        "<clone_uri> = Optional(None)",
 
                "landing_rev" :      "<landing_rev> = Optional('tip')",
 
                "enable_downloads":  "<bool> = Optional(False)",
 
                "enable_locking":    "<bool> = Optional(False)",
 
                "enable_statistics": "<bool> = Optional(False)",
 
              }
 

	
 
OUTPUT::
 

	
 
    id : <id_given_in_input>
 
    result: {
 
              "msg": "Created new repository `<reponame>`",
 
              "repo": {
 
                "repo_id" :          "<repo_id>",
 
                "repo_name" :        "<reponame>"
 
                "repo_type" :        "<repo_type>",
 
                "clone_uri" :        "<clone_uri>",
 
                "private" :          "<bool>",
 
                "created_on" :       "<datetimecreated>",
 
                "description" :      "<description>",
 
                "landing_rev":       "<landing_rev>",
 
                "owner":             "<username or user_id>",
 
                "fork_of":           "<name_of_fork_parent>",
 
                "enable_downloads":  "<bool>",
 
                "enable_locking":    "<bool>",
 
                "enable_statistics": "<bool>",
 
              },
 
            }
 
    error:  null
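As a sketch of the nested-group behaviour described above, such a repository
could be created with the ``kallithea-api`` client (the repository name is a
placeholder)::

    kallithea-api create_repo repo_name:foo/bar/baz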
 

	
 
update_repo
 
-----------
 

	
 
Update a repository.
 
This command can only be executed using the api_key of a user with admin rights,
 
or that of a regular user with create repository permission.
 
Regular users cannot specify owner parameter.
 

	
 
INPUT::
 

	
 
    id : <id_for_response>
 
    api_key : "<api_key>"
 
    method :  "update_repo"
 
    args:     {
 
                "repoid" :           "<reponame or repo_id>"
 
                "name" :             "<reponame> = Optional('')",
 
                "group" :            "<group_id> = Optional(None)",
 
                "owner" :            "<owner_name_or_id = Optional(=apiuser)>",
 
                "description" :      "<description> = Optional('')",
docs/setup.rst
 
.. _setup:
 

	
 
=====
 
Setup
 
=====
 

	
 

	
 
Setting up Kallithea
 
--------------------
 

	
 
First, you will need to create a Kallithea configuration file. Run the
 
following command to do so::
 

	
 
    paster make-config Kallithea my.ini
 

	
 
This will create the file ``my.ini`` in the current directory. This
 
configuration file contains the various settings for Kallithea, e.g.
 
proxy port, email settings, usage of static files, cache, Celery
 
settings, and logging.
 

	
 
Next, you need to create the databases used by Kallithea. It is recommended to
 
use PostgreSQL or SQLite (default). If you choose a database other than the
 
default, ensure you properly adjust the database URL in your ``my.ini``
 
configuration file to use this other database. Kallithea currently supports
 
PostgreSQL, SQLite and MySQL databases. Create the database by running
 
the following command::
 

	
 
    paster setup-db my.ini
 

	
 
This will prompt you for a "root" path. This "root" path is the location where
 
Kallithea will store all of its repositories on the current machine. After
 
entering this "root" path ``setup-db`` will also prompt you for a username
 
and password for the initial admin account which ``setup-db`` sets
 
up for you.
 

	
 
The ``setup-db`` values can also be given on the command line.
 
Example::
 

	
 
    paster setup-db my.ini --user=nn --password=secret --email=nn@example.org --repos=/srv/repos
 
    paster setup-db my.ini --user=nn --password=secret --email=nn@example.com --repos=/srv/repos
 

	
 
The ``setup-db`` command will create all needed tables and an
 
admin account. When choosing a root path you can either use a new
 
empty location, or a location which already contains existing
 
repositories. If you choose a location which contains existing
 
repositories Kallithea will add all of the repositories at the chosen
 
location to its database.  (Note: make sure you specify the correct
 
path to the root).
 

	
 
.. note:: the given path for Mercurial_ repositories **must** be write
 
          accessible for the application. It's very important since
 
          the Kallithea web interface will work without write access,
 
          but when trying to do a push it will fail with permission
 
          denied errors unless it has write access.
 

	
 
You are now ready to use Kallithea. To run it simply execute::
 

	
 
    paster serve my.ini
 

	
 
- This command runs the Kallithea server. The web app should be available at
 
  http://127.0.0.1:5000. The IP address and port is configurable via the
 
  configuration file created in the previous step.
 
- Log in to Kallithea using the admin account created when running ``setup-db``.
 
- The default permission on each repository is read, and the owner is admin.
 
  Remember to update these if needed.
 
- In the admin panel you can toggle LDAP, anonymous, and permissions
 
  settings, as well as edit more advanced options on users and
 
  repositories.
 

	
 

	
 
Extensions
 
----------
 

	
 
Optionally one can create an ``rcextensions`` package that extends Kallithea
 
functionality.
 
To generate a skeleton extensions package, run::
 

	
 
    paster make-rcext my.ini
 

	
 
This will create an ``rcextensions`` package next to the specified ``ini`` file.
 
With ``rcextensions`` it's possible to add additional mappings for Whoosh and

stats, and to add custom code to the push/pull/create/delete repository hooks,

for example to send signals to build bots such as Jenkins.
 

	
 
See the ``__init__.py`` file inside the generated ``rcextensions`` package
 
for more details.
 

	
 

	
 
Using Kallithea with SSH
 
------------------------
 

	
 
Kallithea currently only hosts repositories using http and https. (The addition
 
of ssh hosting is a planned future feature.) However you can easily use ssh in
 
parallel with Kallithea. (Repository access via ssh is a standard "out of
 
the box" feature of Mercurial_ and you can use this to access any of the
 
repositories that Kallithea is hosting. See PublishingRepositories_)
 

	
 
Kallithea repository structures are kept in directories with the same name
 
as the project. When using repository groups, each group is a subdirectory.
 
This allows you to easily use ssh for accessing repositories.
 

	
 
In order to use ssh you need to make sure that your web server and the users'
 
login accounts have the correct permissions set on the appropriate directories.
 

	
 
.. note:: These permissions are independent of any permissions you
 
          have set up using the Kallithea web interface.
 

	
 
If your main directory (the same as set in Kallithea settings) is for
 
example set to ``/srv/repos`` and the repository you are using is
 
named ``kallithea``, then to clone via ssh you should run::
 

	
 
    hg clone ssh://user@server.com//srv/repos/kallithea
 
    hg clone ssh://user@kallithea.example.com/srv/repos/kallithea
 

	
 
Using other external tools such as mercurial-server_ or using ssh key-based
 
authentication is fully supported.
 

	
 
.. note:: In an advanced setup, in order for your ssh access to use
 
          the same permissions as set up via the Kallithea web
 
          interface, you can create an authentication hook to connect
 
          to the Kallithea db and run check functions for permissions
 
          against that.
 

	
 

	
 
Setting up Whoosh full text search
 
----------------------------------
 

	
 
Kallithea provides full text search of repositories using `Whoosh`__.
 

	
 
.. __: https://pythonhosted.org/Whoosh/
 

	
 
For an incremental index build, run::
 

	
 
    paster make-index my.ini
 

	
 
For a full index rebuild, run::
 

	
 
    paster make-index my.ini -f
 

	
 
The ``--repo-location`` option allows the location of the repositories to be overridden;
 
usually, the location is retrieved from the Kallithea database.
 

	
 
The ``--index-only`` option can be used to limit the indexed repositories to a comma-separated list::
 

	
 
    paster make-index my.ini --index-only=vcs,kallithea
 

	
 
To keep your index up-to-date it is necessary to do periodic index builds;
 
for this, it is recommended to use a crontab entry. Example::
 

	
 
    0  3  *  *  *  /path/to/virtualenv/bin/paster make-index /path/to/kallithea/my.ini
 

	
 
When using incremental mode (the default), Whoosh will check the last
 
modification date of each file and add it to be reindexed if a newer file is
 
available. The indexing daemon checks for any removed files and removes them
 
from the index.
 

	
 
If you want to rebuild the index from scratch, you can use the ``-f`` flag as above,
 
or in the admin panel you can check the "build from scratch" checkbox.
 

	
 

	
 
Setting up LDAP support
 
-----------------------
 

	
 
Kallithea supports LDAP authentication. In order
 
to use LDAP, you have to install the python-ldap_ package. This package is
 
available via PyPI, so you can install it by running::
 

	
 
    pip install python-ldap
 

	
 
.. note:: ``python-ldap`` requires some libraries to be installed on
 
          your system, so before installing it check that you have at
 
          least the ``openldap`` and ``sasl`` libraries.
 

	
 
Choose *Admin > Authentication*, click the ``kallithea.lib.auth_modules.auth_ldap`` button
 
and then *Save*, to enable the LDAP plugin and configure its settings.
 

	
 
Here's a typical LDAP setup::
 

	
 
 Connection settings
 
 Enable LDAP          = checked
 
 Host                 = host.example.org
 
 Host                 = host.example.com
 
 Port                 = 389
 
 Account              = <account>
 
 Password             = <password>
 
 Connection Security  = LDAPS connection
 
 Certificate Checks   = DEMAND
 

	
 
 Search settings
 
 Base DN              = CN=users,DC=host,DC=example,DC=org
 
 LDAP Filter          = (&(objectClass=user)(!(objectClass=computer)))
 
 LDAP Search Scope    = SUBTREE
 

	
 
 Attribute mappings
 
 Login Attribute      = uid
 
 First Name Attribute = firstName
 
 Last Name Attribute  = lastName
 
 Email Attribute      = mail
 

	
 
If your user groups are placed in an Organisation Unit (OU) structure, the Search Settings configuration differs::
 

	
 
 Search settings
 
 Base DN              = DC=host,DC=example,DC=com
 
 LDAP Filter          = (&(memberOf=CN=your user group,OU=subunit,OU=unit,DC=host,DC=example,DC=com)(objectClass=user))
 
 LDAP Search Scope    = SUBTREE
 

	
 
.. _enable_ldap:
 

	
 
Enable LDAP : required
 
    Whether to use LDAP for authenticating users.
 

	
 
.. _ldap_host:
 

	
 
Host : required
 
    LDAP server hostname or IP address. It can also be a comma-separated
    list of servers to support LDAP fail-over.
 

	
 
.. _Port:
 

	
 
Port : required
 
    389 for unencrypted LDAP, 636 for SSL-encrypted LDAP (LDAPS).
 

	
 
.. _ldap_account:
 

	
 
Account : optional
 
    Only required if the LDAP server does not allow anonymous browsing of
 
    records.  This should be a special account for record browsing.  This
 
    will require `LDAP Password`_ below.
 

	
 
.. _LDAP Password:
 

	
 
Password : optional
 
    Only required if the LDAP server does not allow anonymous browsing of
 
    records.
 

	
 
.. _Enable LDAPS:
 

	
 
Connection Security : required
 
    Defines how the connection to the LDAP server is secured.
 

	
 
    No encryption
 
        Plain, unencrypted connection
 

	
 
    LDAPS connection
 
        Enable LDAPS connections. It will likely require `Port`_ to be set to
 
        a different value (standard LDAPS port is 636). When LDAPS is enabled,
        `Certificate Checks`_ is required.
 

	
 
    START_TLS on LDAP connection
 
        START TLS connection
 

	
 
.. _Certificate Checks:
 

	
 
Certificate Checks : optional
 
    How SSL certificate verification is handled -- this is only useful when
    `Enable LDAPS`_ is enabled.  Only DEMAND or HARD offer full SSL security
    while the other options are susceptible to man-in-the-middle attacks.  SSL
    certificates can be installed to /etc/openldap/cacerts so that the
    DEMAND or HARD options can be used with self-signed certificates or
    certificates that do not have a traceable certificate authority.
 

	
 
    NEVER
 
        A server certificate will never be requested or checked.
 

	
 
    ALLOW
 
        A server certificate is requested.  Failure to provide a
 
        certificate or providing a bad certificate will not terminate the
 
        session.
 

	
 
    TRY
 
        A server certificate is requested.  Failure to provide a
 
        certificate does not halt the session; providing a bad certificate
 
        halts the session.
 

	
 
    DEMAND
 
        A server certificate is requested and must be provided and
 
        authenticated for the session to proceed.
 

	
 
    HARD
 
        The same as DEMAND.
 

	
 
.. _Base DN:
 

	
 
Base DN : required
 
    The Distinguished Name (DN) where searches for users will be performed.
 
    Searches can be controlled by `LDAP Filter`_ and `LDAP Search Scope`_.
 

	
 
.. _LDAP Filter:
 

	
 
LDAP Filter : optional
 
    An LDAP filter as defined by RFC 2254.  This is more useful when `LDAP
 
    Search Scope`_ is set to SUBTREE.  The filter is useful for limiting
 
    which LDAP objects are identified as representing Users for
 
    authentication.  The filter is augmented by `Login Attribute`_ below.
 
    This can commonly be left blank.
 

	
 
.. _LDAP Search Scope:
 

	
 
LDAP Search Scope : required
 
    This limits how far LDAP will search for a matching object.
 

	
 
    BASE
 
        Only allows searching of `Base DN`_ and is usually not what you
 
        want.
 

	
 
    ONELEVEL
 
        Searches all entries one level under `Base DN`_, but not `Base DN`_ itself.
 

	
 
    SUBTREE
 
        Searches `Base DN`_ and all entries below it.
        When using SUBTREE, `LDAP Filter`_ is useful to limit object
        location.
 

	
 
.. _Login Attribute:
 

	
 
Login Attribute : required
 
    The LDAP record attribute that will be matched as the USERNAME or
 
    ACCOUNT used to connect to Kallithea.  This will be added to `LDAP
 
    Filter`_ for locating the User object.  If `LDAP Filter`_ is specified as
 
    "LDAPFILTER", `Login Attribute`_ is specified as "uid" and the user has
 
    connected as "jsmith" then the `LDAP Filter`_ will be augmented as below
 
    ::
 

	
 
        (&(LDAPFILTER)(uid=jsmith))
 

	
 
.. _ldap_attr_firstname:
 

	
 
First Name Attribute : required
 
    The LDAP record attribute which represents the user's first name.
 

	
 
.. _ldap_attr_lastname:
 

	
 
Last Name Attribute : required
 
    The LDAP record attribute which represents the user's last name.
 

	
 
.. _ldap_attr_email:
 

	
 
Email Attribute : required
 
    The LDAP record attribute which represents the user's email address.
 

	
 
If all data are entered correctly, and python-ldap_ is properly installed,
users should be granted access to Kallithea with their LDAP accounts.  At this
time, user information is copied from LDAP into the Kallithea user database.
This means that updates to an LDAP user object may not be reflected as a
user update in Kallithea.
 

	
 
If you have problems with LDAP access and believe you entered the correct
information, check the Kallithea logs; any error messages sent from LDAP
will be saved there.
 

	
 
Active Directory
 
''''''''''''''''
 

	
 
Kallithea can use Microsoft Active Directory for user authentication.  This
 
is done through an LDAP or LDAPS connection to Active Directory.  The
 
following LDAP configuration settings are typical for using Active
 
Directory ::
 

	
 
 Base DN              = OU=SBSUsers,OU=Users,OU=MyBusiness,DC=v3sys,DC=local
 
 Login Attribute      = sAMAccountName
 
 First Name Attribute = givenName
 
 Last Name Attribute  = sn
 
 Email Attribute      = mail
 

	
 
All other LDAP settings will likely be site-specific and should be
 
appropriately configured.
 

	
 

	
 
Authentication by container or reverse-proxy
 
--------------------------------------------
 

	
 
Kallithea supports delegating the authentication
 
of users to its WSGI container, or to a reverse-proxy server through which all
 
clients access the application.
 

	
 
When these authentication methods are enabled in Kallithea, it uses the
 
username that the container/proxy (Apache or Nginx, etc.) provides and doesn't
 
perform the authentication itself. The authorization, however, is still done by
 
Kallithea according to its settings.
 

	
 
When a user logs in for the first time using these authentication methods,
 
a matching user account is created in Kallithea with default permissions. An
 
administrator can then modify it using Kallithea's admin interface.
 

	
 
It's also possible for an administrator to create accounts and configure their
 
permissions before the user logs in for the first time, using the :ref:`create-user` API.
 

	
 
Container-based authentication
 
''''''''''''''''''''''''''''''
 

	
 
In a container-based authentication setup, Kallithea reads the user name from
 
the ``REMOTE_USER`` server variable provided by the WSGI container.
 

	
 
After setting up your container (see `Apache with mod_wsgi`_), you'll need
 
to configure it to require authentication on the location configured for
 
Kallithea.
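
For example, with Apache, requiring basic authentication on the Kallithea
location could look like the following sketch (the password file path is
illustrative):

.. code-block:: apache

    <Location />
      AuthType Basic
      AuthName "Kallithea authentication"
      AuthUserFile /srv/kallithea/.htpasswd
      Require valid-user
    </Location>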
 

	
 
Proxy pass-through authentication
 
'''''''''''''''''''''''''''''''''
 

	
 
In a proxy pass-through authentication setup, Kallithea reads the user name
 
from the ``X-Forwarded-User`` request header, which should be configured to be
 
sent by the reverse-proxy server.
 

	
 
After setting up your proxy solution (see `Apache virtual host reverse proxy example`_,
 
`Apache as subdirectory`_ or `Nginx virtual host example`_), you'll need to
 
configure the authentication and add the username in a request header named
 
``X-Forwarded-User``.
 

	
 
For example, the following config section for Apache sets a subdirectory in a
 
reverse-proxy setup with basic auth:
 

	
 
.. code-block:: apache
 

	
 
    <Location /someprefix>
 
      ProxyPass http://127.0.0.1:5000/someprefix
 
      ProxyPassReverse http://127.0.0.1:5000/someprefix
 
      SetEnvIf X-Url-Scheme https HTTPS=1
 

	
 
      AuthType Basic
 
      AuthName "Kallithea authentication"
 
      AuthUserFile /srv/kallithea/.htpasswd
 
      Require valid-user
 

	
 
      RequestHeader unset X-Forwarded-User
 

	
 
      RewriteEngine On
 
      RewriteCond %{LA-U:REMOTE_USER} (.+)
 
      RewriteRule .* - [E=RU:%1]
 
      RequestHeader set X-Forwarded-User %{RU}e
 
    </Location>
 

	
 
.. note::
 
   If you enable proxy pass-through authentication, make sure your server is
 
   only accessible through the proxy. Otherwise, any client would be able to
 
   forge the authentication header and could effectively become authenticated
 
   using any account of their liking.
 

	
 

	
 
Integration with issue trackers
 
-------------------------------
 

	
 
Kallithea provides a simple integration with issue trackers. It's possible
 
to define a regular expression that will match an issue ID in commit messages,
 
and have that replaced with a link to the issue. To enable this, simply
uncomment the following variables in the .ini file::
 

	
 
    issue_pat = (?:^#|\s#)(\w+)
 
    issue_server_link = https://myissueserver.com/{repo}/issue/{id}
 
    issue_server_link = https://issues.example.com/{repo}/issue/{id}
 
    issue_prefix = #
 

	
 
``issue_pat`` is the regular expression describing which strings in
 
commit messages will be treated as issue references. A match group in
 
parentheses should be used to specify the actual issue id.
 

	
 
The default expression matches issues in the format ``#<number>``, e.g., ``#300``.
 

	
 
Matched issue references are replaced with the link specified in
 
``issue_server_link``. ``{id}`` is replaced with the issue ID, and
 
``{repo}`` with the repository name.  Since the # is stripped away,
 
``issue_prefix`` is prepended to the link text.  ``issue_prefix`` doesn't
 
necessarily need to be ``#``: if you set the issue prefix to ``ISSUE-``, this will
generate a link in the format:
 

	
 
.. code-block:: html
 

	
 
  <a href="https://myissueserver.com/example_repo/issue/300">ISSUE-300</a>
 
  <a href="https://issues.example.com/example_repo/issue/300">ISSUE-300</a>
 

	
 
If needed, more than one pattern can be specified by appending a unique suffix to
 
the variables. For example::
 

	
 
    issue_pat_wiki = (?:wiki-)(.+)
 
    issue_server_link_wiki = https://mywiki.com/{id}
 
    issue_server_link_wiki = https://wiki.example.com/{id}
 
    issue_prefix_wiki = WIKI-
 

	
 
With these settings, wiki pages can be referenced as ``wiki-some-id``, and every
 
such reference will be transformed into:
 

	
 
.. code-block:: html
 

	
 
  <a href="https://mywiki.com/some-id">WIKI-some-id</a>
 
  <a href="https://wiki.example.com/some-id">WIKI-some-id</a>
 

	
 

	
 
Hook management
 
---------------
 

	
 
Hooks can be managed in a similar way to hooks in ``.hgrc`` files.
 
To manage hooks, choose *Admin > Settings > Hooks*.
 

	
 
The built-in hooks cannot be modified, though they can be enabled or disabled in the *VCS* section.
 

	
 
To add another custom hook, simply fill in the first textbox with
 
``<name>.<hook_type>`` and the second with the hook path. Example hooks
 
can be found in ``kallithea.lib.hooks``.
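
For example, to run a hypothetical Mercurial hook on incoming changesets, the
two fields could be filled in as follows (both values are purely illustrative;
the hook module must be importable by the Kallithea process)::

    first textbox:   commit_check.pretxnchangegroup
    second textbox:  python:myext.hooks.check_push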
 

	
 

	
 
Changing default encoding
 
-------------------------
 

	
 
By default, Kallithea uses UTF-8 encoding.
 
This is configurable as ``default_encoding`` in the .ini file.
 
This affects many parts of Kallithea including user names, filenames, and
the encoding of commit messages. In addition, Kallithea can detect if the ``chardet``
library is installed. If ``chardet`` is detected, Kallithea will fall back to it
when there are encode/decode errors.
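
For example, in the ``[app:main]`` section of the .ini file::

    default_encoding = utf8

A comma-separated list of encodings can also be given if repositories contain
mixed encodings, e.g. ``default_encoding = utf8,latin1``.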
 

	
 

	
 
Celery configuration
 
--------------------
 

	
 
Kallithea can use the distributed task queue system Celery_ to run tasks like
 
cloning repositories or sending emails.
 

	
 
Kallithea will in most setups work perfectly fine out of the box (without
 
Celery), executing all tasks in the web server process. Some tasks can,
however, take some time to run, and it can be better to run such tasks
asynchronously in a separate process so the web server can focus on serving
web requests.
 

	
 
For installation and configuration of Celery, see the `Celery documentation`_.
 
Note that Celery requires a message broker service like RabbitMQ_ (recommended)
 
or Redis_.
 

	
 
The use of Celery is configured in the Kallithea ini configuration file.
 
To enable it, simply set::
 

	
 
  use_celery = true
 

	
 
and add or change the ``celery.*`` and ``broker.*`` configuration variables.
 

	
 
Remember that the ini files use '.' where Celery uses '_'. So, for example,
the Celery setting `BROKER_HOST` becomes `broker.host` in the configuration
file.
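
A minimal sketch of the relevant settings, matching the defaults from the
generated configuration (the credentials are placeholders)::

    use_celery = true
    broker.host = localhost
    broker.port = 5672
    broker.vhost = rabbitmqhost
    broker.user = rabbitmq
    broker.password = qweqwe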
 

	
 
To start the Celery process, run::
 

	
 
 paster celeryd <configfile.ini>
 

	
 
.. note::
 
   Make sure you run this command from the same virtualenv, and as the same
   user that Kallithea runs as.
 

	
 

	
 
HTTPS support
 
-------------
 

	
 
Kallithea will by default generate URLs based on the WSGI environment.
 

	
 
Alternatively, you can use some special configuration settings to control
 
directly which scheme/protocol Kallithea will use when generating URLs:
 

	
 
- With ``https_fixup = true``, the scheme will be taken from the
 
  ``X-Url-Scheme``, ``X-Forwarded-Scheme`` or ``X-Forwarded-Proto`` HTTP header
 
  (default ``http``).
 
- With ``force_https = true`` the default will be ``https``.
 
- With ``use_htsts = true``, Kallithea will set ``Strict-Transport-Security`` when using https.
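
For example, when running behind an SSL-terminating reverse proxy, the
``[app:main]`` section might contain (a sketch, not a recommendation for
every setup)::

    https_fixup = true
    force_https = false
    use_htsts = true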
 

	
 

	
 
Nginx virtual host example
 
--------------------------
 

	
 
Sample config for Nginx using proxy:
 

	
 
.. code-block:: nginx
 

	
 
    upstream kallithea {
 
        server 127.0.0.1:5000;
 
        # add more instances for load balancing
 
        #server 127.0.0.1:5001;
 
        #server 127.0.0.1:5002;
 
    }
 

	
 
    ## gist alias
 
    server {
 
       listen          443;
 
       server_name     gist.myserver.com;
 
       server_name     gist.example.com;
 
       access_log      /var/log/nginx/gist.access.log;
 
       error_log       /var/log/nginx/gist.error.log;
 

	
 
       ssl on;
 
       ssl_certificate     gist.your.kallithea.server.crt;
 
       ssl_certificate_key gist.your.kallithea.server.key;
 

	
 
       ssl_session_timeout 5m;
 

	
 
       ssl_protocols SSLv3 TLSv1;
 
       ssl_ciphers DHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA:EDH-RSA-DES-CBC3-SHA:AES256-SHA:DES-CBC3-SHA:AES128-SHA:RC4-SHA:RC4-MD5;
 
       ssl_prefer_server_ciphers on;
 

	
 
       rewrite ^/(.+)$ https://your.kallithea.server/_admin/gists/$1;
 
       rewrite (.*)    https://your.kallithea.server/_admin/gists;
 
       rewrite ^/(.+)$ https://kallithea.example.com/_admin/gists/$1;
 
       rewrite (.*)    https://kallithea.example.com/_admin/gists;
 
    }
 

	
 
    server {
 
       listen          443;
 
       server_name     your.kallithea.server;
 
       server_name     kallithea.example.com;
 
       access_log      /var/log/nginx/kallithea.access.log;
 
       error_log       /var/log/nginx/kallithea.error.log;
 

	
 
       ssl on;
 
       ssl_certificate     your.kallithea.server.crt;
 
       ssl_certificate_key your.kallithea.server.key;
 

	
 
       ssl_session_timeout 5m;
 

	
 
       ssl_protocols SSLv3 TLSv1;
 
       ssl_ciphers DHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA:EDH-RSA-DES-CBC3-SHA:AES256-SHA:DES-CBC3-SHA:AES128-SHA:RC4-SHA:RC4-MD5;
 
       ssl_prefer_server_ciphers on;
 

	
 
       ## uncomment root directive if you want to serve static files by nginx
 
       ## requires static_files = false in .ini file
 
       #root /path/to/installation/kallithea/public;
 
       include         /etc/nginx/proxy.conf;
 
       location / {
 
            try_files $uri @kallithea;
 
       }
 

	
 
       location @kallithea {
 
            proxy_pass      http://127.0.0.1:5000;
 
       }
 

	
 
    }
 

	
 
Here's the proxy.conf. It's tuned so it will not time out on long
or large pushes::
 

	
 
    proxy_redirect              off;
 
    proxy_set_header            Host $host;
 
    ## needed for container auth
 
    #proxy_set_header            REMOTE_USER $remote_user;
 
    #proxy_set_header            X-Forwarded-User $remote_user;
 
    proxy_set_header            X-Url-Scheme $scheme;
 
    proxy_set_header            X-Host $http_host;
 
    proxy_set_header            X-Real-IP $remote_addr;
 
    proxy_set_header            X-Forwarded-For $proxy_add_x_forwarded_for;
 
    proxy_set_header            Proxy-host $proxy_host;
 
    proxy_buffering             off;
 
    proxy_connect_timeout       7200;
 
    proxy_send_timeout          7200;
 
    proxy_read_timeout          7200;
 
    proxy_buffers               8 32k;
 
    client_max_body_size        1024m;
 
    client_body_buffer_size     128k;
 
    large_client_header_buffers 8 64k;
 

	
 

	
 
Apache virtual host reverse proxy example
 
-----------------------------------------
 

	
 
Here is a sample configuration file for Apache using proxy:
 

	
 
.. code-block:: apache
 

	
 
    <VirtualHost *:80>
 
            ServerName hg.myserver.com
 
            ServerName kallithea.example.com
 

	
 
            <Proxy *>
 
              # For Apache 2.4 and later:
 
              Require all granted
 

	
 
              # For Apache 2.2 and earlier, instead use:
 
              # Order allow,deny
 
              # Allow from all
 
            </Proxy>
 

	
 
            #important !
 
            #Directive to properly generate url (clone url) for pylons
 
            ProxyPreserveHost On
 

	
 
            #kallithea instance
 
            ProxyPass / http://127.0.0.1:5000/
 
            ProxyPassReverse / http://127.0.0.1:5000/
 

	
 
            #to enable https use line below
 
            #SetEnvIf X-Url-Scheme https HTTPS=1
 
    </VirtualHost>
 

	
 
Additional tutorial
 
http://pylonsbook.com/en/1.1/deployment.html#using-apache-to-proxy-requests-to-pylons
 

	
 

	
 
Apache as subdirectory
 
----------------------
 

	
 
Apache subdirectory part:
 

	
 
.. code-block:: apache
 

	
 
    <Location /<someprefix> >
 
      ProxyPass http://127.0.0.1:5000/<someprefix>
 
      ProxyPassReverse http://127.0.0.1:5000/<someprefix>
 
      SetEnvIf X-Url-Scheme https HTTPS=1
 
    </Location>
 

	
 
Besides the regular Apache setup, you will need to add the following line
to the ``[app:main]`` section of your .ini file::
 

	
 
    filter-with = proxy-prefix
 

	
 
Add the following at the end of the .ini file::
 

	
 
    [filter:proxy-prefix]
 
    use = egg:PasteDeploy#prefix
 
    prefix = /<someprefix>
 

	
 
Then change ``<someprefix>`` to your chosen prefix.
 

	
 

	
 
Apache with mod_wsgi
 
--------------------
 

	
 
Alternatively, Kallithea can be set up with Apache under mod_wsgi. For
 
that, you'll need to:
 

	
 
- Install mod_wsgi. If using a Debian-based distro, you can install
 
  the package libapache2-mod-wsgi::
 

	
 
    aptitude install libapache2-mod-wsgi
 

	
 
- Enable mod_wsgi::
 

	
 
    a2enmod wsgi
 

	
 
- Create a wsgi dispatch script, like the one below. Make sure you
 
  check that the paths correctly point to where you installed Kallithea
 
  and its Python Virtual Environment.
 
- Enable the ``WSGIScriptAlias`` directive for the WSGI dispatch script,
 
  as in the following example. Once again, check the paths are
 
  correctly specified.
 

	
 
Here is a sample excerpt from an Apache Virtual Host configuration file:
 

	
 
.. code-block:: apache
 

	
 
    WSGIDaemonProcess kallithea \
 
        processes=1 threads=4 \
 
        python-path=/srv/kallithea/pyenv/lib/python2.7/site-packages
 
    WSGIScriptAlias / /srv/kallithea/dispatch.wsgi
 
    WSGIPassAuthorization On
 

	
 
Or if using a dispatcher WSGI script with proper virtualenv activation:
 

	
 
.. code-block:: apache
 

	
 
    WSGIDaemonProcess kallithea processes=1 threads=4
 
    WSGIScriptAlias / /srv/kallithea/dispatch.wsgi
 
    WSGIPassAuthorization On
 

	
 
.. note::
 
   When running apache as root, please make sure it doesn't run Kallithea as
   root, for example by adding ``user=www-data group=www-data`` to the configuration.
 

	
 
.. note::
 
   If running Kallithea in multiprocess mode,
 
   make sure you set ``instance_id = *`` in the configuration so each process
 
   gets its own cache invalidation key.
 

	
 
Example WSGI dispatch script:
 

	
 
.. code-block:: python
 

	
 
    import os
 
    os.environ["HGENCODING"] = "UTF-8"
 
    os.environ['PYTHON_EGG_CACHE'] = '/srv/kallithea/.egg-cache'
 

	
 
    # sometimes it's necessary to set the current dir
 
    os.chdir('/srv/kallithea/')
 

	
 
    import site
 
    site.addsitedir("/srv/kallithea/pyenv/lib/python2.7/site-packages")
 

	
 
    from paste.deploy import loadapp
 
    from paste.script.util.logging_config import fileConfig
 

	
 
    fileConfig('/srv/kallithea/my.ini')
 
    application = loadapp('config:/srv/kallithea/my.ini')
 

	
 
Or using proper virtualenv activation:
 

	
 
.. code-block:: python
 

	
 
    activate_this = '/srv/kallithea/venv/bin/activate_this.py'
 
    execfile(activate_this, dict(__file__=activate_this))
 

	
 
    import os
 
    os.environ['HOME'] = '/srv/kallithea'
 

	
 
    ini = '/srv/kallithea/kallithea.ini'
 
    from paste.script.util.logging_config import fileConfig
 
    fileConfig(ini)
 
    from paste.deploy import loadapp
 
    application = loadapp('config:' + ini)
 

	
 

	
 
Other configuration files
 
-------------------------
 

	
 
A number of `example init.d scripts`__ can be found in
 
the ``init.d`` directory of the Kallithea source.
 

	
 
.. __: https://kallithea-scm.org/repos/kallithea/files/tip/init.d/
 

	
 

	
 
.. _virtualenv: http://pypi.python.org/pypi/virtualenv
 
.. _python: http://www.python.org/
 
.. _Mercurial: http://mercurial.selenic.com/
 
.. _Celery: http://celeryproject.org/
 
.. _Celery documentation: http://docs.celeryproject.org/en/latest/getting-started/index.html
 
.. _RabbitMQ: http://www.rabbitmq.com/
 
.. _Redis: http://redis.io/
 
.. _python-ldap: http://www.python-ldap.org/
 
.. _mercurial-server: http://www.lshift.net/mercurial-server.html
 
.. _PublishingRepositories: http://mercurial.selenic.com/wiki/PublishingRepositories
docs/usage/general.rst
Show inline comments
 
.. _general:
 

	
 
=======================
 
General Kallithea usage
 
=======================
 

	
 

	
 
Repository deletion
 
-------------------
 

	
 
Currently when an admin or owner deletes a repository, Kallithea does
 
not physically delete said repository from the filesystem, but instead
 
renames it in a special way so that it is not possible to push, clone
 
or access the repository.
 

	
 
There is a special command for cleaning up such archived repositories::
 

	
 
    paster cleanup-repos --older-than=30d my.ini
 

	
 
This command scans for archived repositories that are older than
 
30 days, displays them, and asks if you want to delete them (unless given
 
the ``--dont-ask`` flag). If you host a large number of repositories with
 
forks that are constantly being deleted, it is recommended that you run this
 
command via crontab.
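
For example (the paths are illustrative)::

    0  4  *  *  *  /path/to/virtualenv/bin/paster cleanup-repos --older-than=30d --dont-ask /path/to/kallithea/my.ini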
 

	
 
It is worth noting that even if someone is given administrative access to
Kallithea and deletes a repository, you can easily undo such an action by
renaming the repository directory on the filesystem, removing the
``rm__<date>`` prefix.
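
A hypothetical example, assuming an archived repository directory named as
below (the exact timestamp format may differ)::

    cd /srv/repos
    mv rm__20150911_130047__repo_name repo_name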
 

	
 

	
 
File view: follow current branch
 
--------------------------------
 

	
 
In the file view, the left and right arrows allow you to jump to the previous and next
 
revision. Depending on the way revisions were created in the repository, this
 
could jump to a different branch.  When the checkbox ``Follow current branch``
 
is checked, these arrows will only jump to revisions on the same branch as the
 
currently visible revision.  So for example, if someone is viewing files in the
 
``beta`` branch and marks the `Follow current branch` checkbox, the < and >
 
arrows will only show revisions on the ``beta`` branch.
 

	
 

	
 
Changelog features
 
------------------
 

	
 
The core feature of a repository's ``changelog`` page is to show the revisions
 
in a repository. However, there are several other features available from the
 
changelog.
 

	
 
Branch filter
 
  By default, the changelog shows revisions from all branches in the
 
  repository. Use the branch filter to restrict to a given branch.
 

	
 
Viewing a changeset
 
  A particular changeset can be opened by clicking on either the changeset
 
  hash or the commit message, or by ticking the checkbox and clicking the
 
  ``Show selected changeset`` button at the top.
 

	
 
Viewing all changes between two changesets
 
  To get a list of all changesets between two selected changesets, along with
 
  the changes in each one of them, tick the checkboxes of the first and
 
  last changeset in the desired range and click the ``Show selected changesets``
 
  button at the top. You can only show the range between the first and last
 
  checkbox (no cherry-picking).
 

	
 
  From that page, you can proceed to viewing the overall delta between the
 
  selected changesets, by clicking the ``Compare revisions`` button.
 

	
 
Creating a pull request
 
  You can create a new pull request for the changes of a particular changeset
 
  (and its ancestors) by selecting it and clicking the ``Open new pull request
 
  for selected changesets`` button.
 

	
 

	
 
Permanent repository URLs
 
-------------------------
 

	
 
Due to the complicated nature of repository grouping, URLs of repositories
 
can often change. For example, a repository originally accessible from::
 

	
 
  http://example.com/repo_name
 
  http://kallithea.example.com/repo_name
 

	
 
would get a new URL after moving it to test_group::
 

	
 
  http://example.com/test_group/repo_name
 
  http://kallithea.example.com/test_group/repo_name
 

	
 
Such moving of a repository to a group can be an issue for build systems and
 
other scripts where the repository paths are hardcoded. To mitigate this,
 
Kallithea provides permanent URLs using the repository ID prefixed with an
 
underscore. In all Kallithea URLs, for example those for the changelog and the
 
file view, a repository name can be replaced by this ``_ID`` string. Since IDs
 
are always the same, moving the repository to a different group will not affect
 
such URLs.
 

	
 
In the example, the repository could also be accessible as::
 

	
 
  http://example.com/_<ID>
 
  http://kallithea.example.com/_<ID>
 

	
 
The ID of a given repository can be shown from the repository ``Summary`` page,
 
by selecting the ``Show by ID`` button next to ``Clone URL``.
 

	
 

	
 
Email notifications
 
-------------------
 

	
 
With email settings properly configured in the Kallithea
 
configuration file, Kallithea will send emails on user registration and when
 
errors occur.
 

	
 
Emails are also sent for comments on changesets. In this case, an email is sent
 
to the committer of the changeset (if known to Kallithea), to all reviewers of
 
the pull request (if applicable) and to all people mentioned in the comment
 
using @mention notation.
 

	
 

	
 
Trending source files
 
---------------------
 

	
 
Trending source files are calculated based on a predefined dictionary of known
 
types and extensions. If an extension is missing or you would like to scan
 
custom files, it is possible to extend the ``LANGUAGES_EXTENSIONS_MAP``
 
dictionary located in ``kallithea/config/conf.py`` with new types.
 

	
 

	
 
Cloning remote repositories
 
---------------------------
 

	
 
Kallithea has the ability to clone repositories from given remote locations.
 
Currently it supports the following options:
 

	
 
- hg  -> hg clone
 
- svn -> hg clone
 
- git -> git clone
 

	
 
.. note:: svn -> hg cloning requires the ``hgsubversion`` library to be
 
   installed.
 

	
 
If you need to clone repositories that are protected via basic authentication,
 
you can pass the credentials in the URL, e.g.
 
``http://user:passw@remote.server/repo``. Kallithea will then try to login and
 
``http://user:passw@remote.example.com/repo``. Kallithea will then try to log in and
 
clone using the given credentials. Please note that the given credentials will
 
be stored as plaintext inside the database. However, the authentication
 
information will not be shown in the clone URL on the summary page.
 

	
 

	
 
Specific features configurable in the Admin settings
 
----------------------------------------------------
 

	
 
In general, the Admin settings should be self-explanatory and will not be
 
described in more detail in this documentation. However, there are a few
 
features that merit further explanation.
 

	
 
Repository extra fields
 
~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
In the *Visual* tab, there is an option "Use repository extra
 
fields", which allows to set custom fields for each repository in the system.
 

	
 
Once enabled site-wide, the custom fields can be edited per-repository under
 
*Options* | *Settings* | *Extra Fields*.
 

	
 
An example usage of such fields would be to store company-specific information
in repositories, e.g., defining a ``repo_manager`` key that gives info
about the manager of each repository.  There is no limit on the number of
custom fields.  Newly created fields are accessible via the API.
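
For example, repository data can be retrieved with the ``get_repo`` API call,
e.g. via the ``kallithea-api`` command line client (the repository name below
is illustrative)::

    kallithea-api get_repo repoid:my_repo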
 

	
 
Meta tagging
 
~~~~~~~~~~~~
 

	
 
In the *Visual* tab, option "Stylify recognised meta tags" will cause Kallithea
 
to turn certain text fragments in repository and repository group
 
descriptions into colored tags. Currently recognised tags are::
 

	
 
    [featured]
 
    [stale]
 
    [dead]
 
    [lang => lang]
 
    [license => License]
 
    [requires => Repo]
 
    [recommends => Repo]
 
    [see => URI]
kallithea/bin/kallithea_api.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.bin.kallithea_api
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Api CLI client for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: Jun 3, 2012
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import sys
 
import argparse
 

	
 
from kallithea.bin.base import json, api_call, RcConf, FORMAT_JSON, FORMAT_PRETTY
 

	
 

	
 
def argparser(argv):
 
    usage = (
 
      "kallithea-api [-h] [--format=FORMAT] [--apikey=APIKEY] [--apihost=APIHOST] "
 
      "[--config=CONFIG] [--save-config] "
 
      "METHOD <key:val> <key2:val> ...\n"
 
      "Create config file: kallithea-api --apikey=<key> --apihost=http://your.kallithea.server --save-config"
 
      "Create config file: kallithea-api --apikey=<key> --apihost=http://kallithea.example.com --save-config"
 
    )
 

	
 
    parser = argparse.ArgumentParser(description='Kallithea API cli',
 
                                     usage=usage)
 

	
 
    ## config
 
    group = parser.add_argument_group('config')
 
    group.add_argument('--apikey', help='api access key')
 
    group.add_argument('--apihost', help='api host')
 
    group.add_argument('--config', help='config file')
 
    group.add_argument('--save-config', action='store_true', help='save the given config into a file')
 

	
 
    group = parser.add_argument_group('API')
 
    group.add_argument('method', metavar='METHOD', nargs='?', type=str, default=None,
 
            help='API method name to call followed by key:value attributes',
 
    )
 
    group.add_argument('--format', dest='format', type=str,
 
            help='output format default: `%s` can '
 
                 'be also `%s`' % (FORMAT_PRETTY, FORMAT_JSON),
 
            default=FORMAT_PRETTY
 
    )
 
    args, other = parser.parse_known_args()
 
    return parser, args, other
 

	
 

	
 
def main(argv=None):
 
    """
 
    Main execution function for cli
 

	
 
    :param argv:
 
    """
 
    if argv is None:
 
        argv = sys.argv
 

	
 
    conf = None
 
    parser, args, other = argparser(argv)
 

	
 
    api_credentials_given = (args.apikey and args.apihost)
 
    if args.save_config:
 
        if not api_credentials_given:
 
            raise parser.error('--save-config requires --apikey and --apihost')
 
        conf = RcConf(config_location=args.config,
 
                      autocreate=True, config={'apikey': args.apikey,
 
                                               'apihost': args.apihost})
 
        sys.exit()
 

	
 
    if not conf:
 
        conf = RcConf(config_location=args.config, autoload=True)
 
        if not conf:
 
            if not api_credentials_given:
 
                parser.error('Could not find config file and missing '
 
                             '--apikey or --apihost in params')
 

	
 
    apikey = args.apikey or conf['apikey']
 
    apihost = args.apihost or conf['apihost']
 
    method = args.method
 

	
 
    # if we don't have method here it's an error
 
    if not method:
 
        parser.error('Please specify method name')
 

	
 
    try:
 
        margs = dict(map(lambda s: s.split(':', 1), other))
 
    except ValueError:
 
        sys.stderr.write('Error parsing arguments \n')
 
        sys.exit()
 
    if args.format == FORMAT_PRETTY:
 
        print 'Calling method %s => %s' % (method, apihost)
 

	
 
    json_resp = api_call(apikey, apihost, method, **margs)
 
    error_prefix = ''
 
    if json_resp['error']:
 
        error_prefix = 'ERROR:'
 
        json_data = json_resp['error']
 
    else:
 
        json_data = json_resp['result']
 
    if args.format == FORMAT_JSON:
 
        print json.dumps(json_data)
 
    elif args.format == FORMAT_PRETTY:
 
        print 'Server response \n%s%s' % (
 
            error_prefix, json.dumps(json_data, indent=4, sort_keys=True)
 
        )
 
    return 0
 

	
 
if __name__ == '__main__':
 
    sys.exit(main(sys.argv))
kallithea/bin/kallithea_gist.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
"""
 
kallithea.bin.kallithea_gist
 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 

	
 
Gist CLI client for Kallithea
 

	
 
This file was forked by the Kallithea project in July 2014.
 
Original author and date, and relevant copyright and licensing information is below:
 
:created_on: May 9, 2013
 
:author: marcink
 
:copyright: (c) 2013 RhodeCode GmbH, and others.
 
:license: GPLv3, see LICENSE.md for more details.
 
"""
 

	
 
import os
 
import sys
 
import stat
 
import argparse
 
import fileinput
 

	
 
from kallithea.bin.base import json, api_call, RcConf, FORMAT_JSON, FORMAT_PRETTY
 

	
 

	
 
def argparser(argv):
 
    usage = (
 
      "kallithea-gist [-h] [--format=FORMAT] [--apikey=APIKEY] [--apihost=APIHOST] "
 
      "[--config=CONFIG] [--save-config] [GIST OPTIONS] "
 
      "[filename or stdin use - for terminal stdin ]\n"
 
      "Create config file: kallithea-gist --apikey=<key> --apihost=http://your.kallithea.server --save-config"
 
      "Create config file: kallithea-gist --apikey=<key> --apihost=http://kallithea.example.com --save-config"
 
    )
 

	
 
    parser = argparse.ArgumentParser(description='Kallithea Gist cli',
 
                                     usage=usage)
 

	
 
    ## config
 
    group = parser.add_argument_group('config')
 
    group.add_argument('--apikey', help='api access key')
 
    group.add_argument('--apihost', help='api host')
 
    group.add_argument('--config', help='config file path DEFAULT: ~/.config/kallithea')
 
    group.add_argument('--save-config', action='store_true',
 
                       help='save the given config into a file')
 

	
 
    group = parser.add_argument_group('GIST')
 
    group.add_argument('-p', '--private', action='store_true',
 
                       help='create private Gist')
 
    group.add_argument('-f', '--filename',
 
                       help='set uploaded gist filename, '
 
                            'also defines syntax highlighting')
 
    group.add_argument('-d', '--description', help='Gist description')
 
    group.add_argument('-l', '--lifetime', metavar='MINUTES',
 
                       help='gist lifetime in minutes, -1 (DEFAULT) is forever')
 
    group.add_argument('--format', dest='format', type=str,
 
                       help='output format DEFAULT: `%s` can '
 
                       'be also `%s`' % (FORMAT_PRETTY, FORMAT_JSON),
 
            default=FORMAT_PRETTY
 
    )
 
    args, other = parser.parse_known_args()
 
    return parser, args, other
 

	
 

	
 
def _run(argv):
 
    conf = None
 
    parser, args, other = argparser(argv)
 

	
 
    api_credentials_given = (args.apikey and args.apihost)
 
    if args.save_config:
 
        if not api_credentials_given:
 
            raise parser.error('--save-config requires --apikey and --apihost')
 
        conf = RcConf(config_location=args.config,
 
                      autocreate=True, config={'apikey': args.apikey,
 
                                               'apihost': args.apihost})
 
        sys.exit()
 

	
 
    if not conf:
 
        conf = RcConf(config_location=args.config, autoload=True)
 
        if not conf:
 
            if not api_credentials_given:
 
                parser.error('Could not find config file and missing '
 
                             '--apikey or --apihost in params')
 

	
 
    apikey = args.apikey or conf['apikey']
 
    host = args.apihost or conf['apihost']
 
    DEFAULT_FILENAME = 'gistfile1.txt'
 
    if other:
 
        # skip multifiles for now
 
        filename = other[0]
 
        if filename == '-':
 
            filename = DEFAULT_FILENAME
 
            gist_content = ''
 
            for line in fileinput.input('-'):
 
                gist_content += line
 
        else:
 
            with open(filename, 'rb') as f:
 
                gist_content = f.read()
 

	
 
    else:
 
        filename = DEFAULT_FILENAME
 
        gist_content = None
 
        # little bit hacky but cross platform check where the
 
        # stdin comes from we skip the terminal case it can be handled by '-'
 
        mode = os.fstat(0).st_mode
 
        if stat.S_ISFIFO(mode):
 
            # "stdin is piped"
 
            gist_content = sys.stdin.read()
 
        elif stat.S_ISREG(mode):
 
            # "stdin is redirected"
 
            gist_content = sys.stdin.read()
 
        else:
 
            # "stdin is terminal"
 
            pass
 

	
 
    # make sure we don't upload binary stuff
 
    if gist_content and '\0' in gist_content:
 
        raise Exception('Error: binary files upload is not possible')
 

	
 
    filename = os.path.basename(args.filename or filename)
 
    if gist_content:
 
        files = {
 
            filename: {
 
                'content': gist_content,
 
                'lexer': None
 
            }
 
        }
 

	
 
        margs = dict(
 
            lifetime=args.lifetime,
 
            description=args.description,
 
            gist_type='private' if args.private else 'public',
 
            files=files
 
        )
 

	
 
        json_data = api_call(apikey, host, 'create_gist', **margs)['result']
 
        if args.format == FORMAT_JSON:
 
            print json.dumps(json_data)
 
        elif args.format == FORMAT_PRETTY:
 
            print json_data
 
            print 'Created %s gist %s' % (json_data['gist']['type'],
 
                                          json_data['gist']['url'])
 
    return 0
 

	
 

	
 
def main(argv=None):
 
    """
 
    Main execution function for cli
 

	
 
    :param argv:
 
    """
 
    if argv is None:
 
        argv = sys.argv
 

	
 
    try:
 
        return _run(argv)
 
    except Exception as e:
 
        print e
 
        return 1
 

	
 

	
 
if __name__ == '__main__':
 
    sys.exit(main(sys.argv))
kallithea/bin/ldap_sync.conf
Show inline comments
 
[default]
 
api_url = http://your.kallithea.server:5000/_admin/api
 
api_url = http://kallithea.example.com/_admin/api
 
api_user = admin
 
api_key = XXXXXXXXXXXX
 

	
 
ldap_uri = ldap://your.ldap.server:389
 
ldap_user = cn=kallithea,ou=binders,dc=linaro,dc=org
 
ldap_uri = ldap://ldap.example.com:389
 
ldap_user = cn=kallithea,dc=example,dc=com
 
ldap_key = XXXXXXXXX
 
base_dn = dc=linaro,dc=org
 
base_dn = dc=example,dc=com
 

	
 
sync_users = True
 
\ No newline at end of file
kallithea/bin/template.ini.mako
Show inline comments
 
## -*- coding: utf-8 -*-
 
<%text>################################################################################</%text>
 
<%text>################################################################################</%text>
 
# Kallithea - config file generated with kallithea-config                      #
 
<%text>################################################################################</%text>
 
<%text>################################################################################</%text>
 

	
 
[DEFAULT]
 
debug = true
 
pdebug = false
 

	
 
<%text>################################################################################</%text>
 
<%text>## Email settings                                                             ##</%text>
 
<%text>##                                                                            ##</%text>
 
<%text>## Refer to the documentation ("Email settings") for more details.            ##</%text>
 
<%text>##                                                                            ##</%text>
 
<%text>## It is recommended to use a valid sender address that passes access         ##</%text>
 
<%text>## validation and spam filtering in mail servers.                             ##</%text>
 
<%text>################################################################################</%text>
 

	
 
<%text>## 'From' header for application emails. You can optionally add a name.</%text>
 
<%text>## Default:</%text>
 
#app_email_from = Kallithea
 
<%text>## Examples:</%text>
 
#app_email_from = Kallithea <kallithea-noreply@example.com>
 
#app_email_from = kallithea-noreply@example.com
 

	
 
<%text>## Subject prefix for application emails.</%text>
 
<%text>## A space between this prefix and the real subject is automatically added.</%text>
 
<%text>## Default:</%text>
 
#email_prefix =
 
<%text>## Example:</%text>
 
#email_prefix = [Kallithea]
 

	
 
<%text>## Recipients for error emails and fallback recipients of application mails.</%text>
 
<%text>## Multiple addresses can be specified, space-separated.</%text>
 
<%text>## Only addresses are allowed, do not add any name part.</%text>
 
<%text>## Default:</%text>
 
#email_to =
 
<%text>## Examples:</%text>
 
#email_to = admin@example.com
 
#email_to = admin@example.com another_admin@example.com
 

	
 
<%text>## 'From' header for error emails. You can optionally add a name.</%text>
 
<%text>## Default:</%text>
 
#error_email_from = pylons@yourapp.com
 
<%text>## Examples:</%text>
 
#error_email_from = Kallithea Errors <kallithea-noreply@example.com>
 
#error_email_from = paste_error@example.com
 

	
 
<%text>## SMTP server settings</%text>
 
<%text>## Only smtp_server is mandatory. All other settings take the specified default</%text>
 
<%text>## values.</%text>
 
#smtp_server = mail.server.com
 
#smtp_server = smtp.example.com
 
#smtp_username =
 
#smtp_password =
 
#smtp_port = 25
 
#smtp_use_tls = false
 
#smtp_use_ssl = false
 
<%text>## SMTP authentication parameters to use (e.g. LOGIN PLAIN CRAM-MD5, etc.).</%text>
 
<%text>## If empty, use any of the authentication parameters supported by the server.</%text>
 
#smtp_auth =
 

	
 
[server:main]
 
%if http_server == 'paste':
 
<%text>## PASTE ##</%text>
 
use = egg:Paste#http
 
<%text>## nr of worker threads to spawn</%text>
 
threadpool_workers = 5
 
<%text>## max request before thread respawn</%text>
 
threadpool_max_requests = 10
 
<%text>## option to use threads of process</%text>
 
use_threadpool = true
 

	
 
%elif http_server == 'waitress':
 
<%text>## WAITRESS ##</%text>
 
use = egg:waitress#main
 
<%text>## number of worker threads</%text>
 
threads = 5
 
<%text>## MAX BODY SIZE 100GB</%text>
 
max_request_body_size = 107374182400
 
<%text>## use poll instead of select, fixes fd limits, may not work on old</%text>
 
<%text>## windows systems.</%text>
 
#asyncore_use_poll = True
 

	
 
%elif http_server == 'gunicorn':
 
<%text>## GUNICORN ##</%text>
 
use = egg:gunicorn#main
 
<%text>## number of process workers. You must set `instance_id = *` when this option</%text>
 
<%text>## is set to more than one worker</%text>
 
workers = 1
 
<%text>## process name</%text>
 
proc_name = kallithea
 
<%text>## type of worker class, one of sync, eventlet, gevent, tornado</%text>
 
<%text>## for bigger setups, using a worker class other than sync is recommended</%text>
 
worker_class = sync
 
max_requests = 1000
 
<%text>## amount of time a worker can handle a request before it gets killed and</%text>
 
<%text>## restarted</%text>
 
timeout = 3600
 

	
 
%elif http_server == 'uwsgi':
 
<%text>## UWSGI ##</%text>
 
<%text>## run with uwsgi --ini-paste-logged <inifile.ini></%text>
 
[uwsgi]
 
socket = /tmp/uwsgi.sock
 
master = true
 
http = 127.0.0.1:5000
 

	
 
<%text>## run as daemon and redirect all output to a file</%text>
 
#daemonize = ./uwsgi_kallithea.log
 

	
 
<%text>## master process PID</%text>
 
pidfile = ./uwsgi_kallithea.pid
 

	
 
<%text>## stats server with workers statistics, use uwsgitop</%text>
 
<%text>## for monitoring, `uwsgitop 127.0.0.1:1717`</%text>
 
stats = 127.0.0.1:1717
 
memory-report = true
 

	
 
<%text>## log 5XX errors</%text>
 
log-5xx = true
 

	
 
<%text>## Set the socket listen queue size.</%text>
 
listen = 256
 

	
 
<%text>## Gracefully Reload workers after the specified amount of managed requests</%text>
 
<%text>## (avoid memory leaks).</%text>
 
max-requests = 1000
 

	
 
<%text>## enable large buffers</%text>
 
buffer-size = 65535
 

	
 
<%text>## socket and http timeouts ##</%text>
 
http-timeout = 3600
 
socket-timeout = 3600
 

	
 
<%text>## Log requests slower than the specified number of milliseconds.</%text>
 
log-slow = 10
 

	
 
<%text>## Exit if no app can be loaded.</%text>
 
need-app = true
 

	
 
<%text>## Set lazy mode (load apps in workers instead of master).</%text>
 
lazy = true
 

	
 
<%text>## scaling ##</%text>
 
<%text>## set cheaper algorithm to use, if not set default will be used</%text>
 
cheaper-algo = spare
 

	
 
<%text>## minimum number of workers to keep at all times</%text>
 
cheaper = 1
 

	
 
<%text>## number of workers to spawn at startup</%text>
 
cheaper-initial = 1
 

	
 
<%text>## maximum number of workers that can be spawned</%text>
 
workers = 4
 

	
 
<%text>## how many workers should be spawned at a time</%text>
 
cheaper-step = 1
 

	
 
%endif
 
<%text>## COMMON ##</%text>
 
host = ${host}
 
port = ${port}
 

	
 
<%text>## middleware for hosting the WSGI application under a URL prefix</%text>
 
#[filter:proxy-prefix]
 
#use = egg:PasteDeploy#prefix
 
#prefix = /<your-prefix>
 

	
 
[app:main]
 
use = egg:kallithea
 
<%text>## enable proxy prefix middleware</%text>
 
#filter-with = proxy-prefix
 

	
 
full_stack = true
 
static_files = true
 
<%text>## Available Languages:</%text>
 
<%text>## cs de fr hu ja nl_BE pl pt_BR ru sk zh_CN zh_TW</%text>
 
lang =
 
cache_dir = ${here}/data
 
index_dir = ${here}/data/index
 

	
 
<%text>## perform a full repository scan on each server start, this should be</%text>
 
<%text>## set to false after first startup, to allow faster server restarts.</%text>
 
initial_repo_scan = false
 

	
 
<%text>## uncomment and set this path to use archive download cache</%text>
 
archive_cache_dir = ${here}/tarballcache
 

	
 
<%text>## change this to unique ID for security</%text>
 
app_instance_uuid = ${uuid()}
 

	
 
<%text>## cut off limit for large diffs (size in bytes)</%text>
 
cut_off_limit = 256000
 

	
 
<%text>## use cache version of scm repo everywhere</%text>
 
vcs_full_cache = true
 

	
 
<%text>## force https in Kallithea, fixes https redirects, assumes it's always https</%text>
 
force_https = false
 

	
 
<%text>## use Strict-Transport-Security headers</%text>
 
use_htsts = false
 

	
 
<%text>## number of commits stats will parse on each iteration</%text>
 
commit_parse_limit = 25
 

	
 
<%text>## path to git executable</%text>
 
git_path = git
 

	
 
<%text>## git rev filter option, --all is the default filter, if you need to</%text>
 
<%text>## hide all refs in changelog switch this to --branches --tags</%text>
 
#git_rev_filter = --branches --tags
 

	
 
<%text>## RSS feed options</%text>
 
rss_cut_off_limit = 256000
 
rss_items_per_page = 10
 
rss_include_diff = false
 

	
 
<%text>## options for showing and identifying changesets</%text>
 
show_sha_length = 12
 
show_revision_number = false
 

	
 
<%text>## gist URL alias, used to create nicer urls for gist. This should be an</%text>
 
<%text>## url that does rewrites to _admin/gists/<gistid>.</%text>
 
<%text>## example: http://gist.kallithea.server/{gistid}. Empty means use the internal</%text>
 
<%text>## Kallithea url, ie. http[s]://your.kallithea.server/_admin/gists/<gistid></%text>
 
<%text>## example: http://gist.example.com/{gistid}. Empty means use the internal</%text>
 
<%text>## Kallithea url, ie. http[s]://kallithea.example.com/_admin/gists/<gistid></%text>
 
gist_alias_url =
 

	
 
<%text>## white list of API enabled controllers. This allows to add list of</%text>
 
<%text>## controllers to which access will be enabled by api_key. eg: to enable</%text>
 
<%text>## api access to raw_files put `FilesController:raw`, to enable access to patches</%text>
 
<%text>## add `ChangesetController:changeset_patch`. This list should be "," separated</%text>
 
<%text>## Syntax is <ControllerClass>:<function>. Check debug logs for generated names</%text>
 
<%text>## Recommended settings below are commented out:</%text>
 
api_access_controllers_whitelist =
 
#    ChangesetController:changeset_patch,
 
#    ChangesetController:changeset_raw,
 
#    FilesController:raw,
 
#    FilesController:archivefile
 

	
 
<%text>## default encoding used to convert from and to unicode</%text>
 
<%text>## can also be a comma-separated list of encodings in case of mixed encodings</%text>
 
default_encoding = utf8
 

	
 
<%text>## issue tracker for Kallithea (leave blank to disable, absent for default)</%text>
 
#bugtracker = https://bitbucket.org/conservancy/kallithea/issues
 

	
 
<%text>## issue tracking mapping for commits messages</%text>
 
<%text>## comment out issue_pat, issue_server, issue_prefix to enable</%text>
 

	
 
<%text>## pattern to get the issues from commit messages</%text>
 
<%text>## default one used here is #<numbers> with a regex passive group for `#`</%text>
 
<%text>## {id} will be all groups matched from this pattern</%text>
 

	
 
issue_pat = (?:\s*#)(\d+)
 

	
 
<%text>## server url to the issue, each {id} will be replaced with match</%text>
 
<%text>## fetched from the regex and {repo} is replaced with full repository name</%text>
 
<%text>## including groups {repo_name} is replaced with just name of repo</%text>
 

	
 
issue_server_link = https://myissueserver.com/{repo}/issue/{id}
 
issue_server_link = https://issues.example.com/{repo}/issue/{id}
 

	
 
<%text>## prefix to add to link to indicate it's an url</%text>
 
<%text>## #314 will be replaced by <issue_prefix><id></%text>
 

	
 
issue_prefix = #
 

	
 
<%text>## issue_pat, issue_server_link, issue_prefix can have suffixes to specify</%text>
 
<%text>## multiple patterns, to other issues server, wiki or others</%text>
 
<%text>## below an example how to create a wiki pattern</%text>
 
# wiki-some-id -> https://mywiki.com/some-id
 
# wiki-some-id -> https://wiki.example.com/some-id
 

	
 
#issue_pat_wiki = (?:wiki-)(.+)
 
#issue_server_link_wiki = https://mywiki.com/{id}
 
#issue_server_link_wiki = https://wiki.example.com/{id}
 
#issue_prefix_wiki = WIKI-
 

	
 
<%text>## instance-id prefix</%text>
 
<%text>## a prefix key for this instance used for cache invalidation when running</%text>
 
<%text>## multiple instances of kallithea, make sure it's globally unique for</%text>
 
<%text>## all running kallithea instances. Leave empty if you don't use it</%text>
 
instance_id =
 

	
 
<%text>## alternative return HTTP header for failed authentication. Default HTTP</%text>
 
<%text>## response is 401 HTTPUnauthorized. Currently Mercurial clients have trouble with</%text>
 
<%text>## handling that. Set this variable to 403 to return HTTPForbidden</%text>
 
auth_ret_code =
 

	
 
<%text>## locking return code. When repository is locked return this HTTP code. 2XX</%text>
 
<%text>## codes don't break the transactions while 4XX codes do</%text>
 
lock_ret_code = 423
 

	
 
<%text>## allows to change the repository location in settings page</%text>
 
allow_repo_location_change = True
 

	
 
<%text>## allows to setup custom hooks in settings page</%text>
 
allow_custom_hooks_settings = True
 

	
 
<%text>####################################</%text>
 
<%text>###        CELERY CONFIG        ####</%text>
 
<%text>####################################</%text>
 

	
 
use_celery = false
 
broker.host = localhost
 
broker.vhost = rabbitmqhost
 
broker.port = 5672
 
broker.user = rabbitmq
 
broker.password = qweqwe
 

	
 
celery.imports = kallithea.lib.celerylib.tasks
 

	
 
celery.result.backend = amqp
 
celery.result.dburi = amqp://
 
celery.result.serialier = json
 

	
 
#celery.send.task.error.emails = true
 
#celery.amqp.task.result.expires = 18000
 

	
 
celeryd.concurrency = 2
 
#celeryd.log.file = celeryd.log
 
celeryd.log.level = DEBUG
 
celeryd.max.tasks.per.child = 1
 

	
 
<%text>## tasks will never be sent to the queue, but executed locally instead.</%text>
 
celery.always.eager = false
 

	
 
<%text>####################################</%text>
 
<%text>###         BEAKER CACHE        ####</%text>
 
<%text>####################################</%text>
 

	
 
beaker.cache.data_dir = ${here}/data/cache/data
 
beaker.cache.lock_dir = ${here}/data/cache/lock
 

	
 
beaker.cache.regions = short_term,long_term,sql_cache_short
 

	
 
beaker.cache.short_term.type = memory
 
beaker.cache.short_term.expire = 60
 
beaker.cache.short_term.key_length = 256
 

	
 
beaker.cache.long_term.type = memory
 
beaker.cache.long_term.expire = 36000
 
beaker.cache.long_term.key_length = 256
 

	
 
beaker.cache.sql_cache_short.type = memory
 
beaker.cache.sql_cache_short.expire = 10
 
beaker.cache.sql_cache_short.key_length = 256
 

	
 
<%text>####################################</%text>
 
<%text>###       BEAKER SESSION        ####</%text>
 
<%text>####################################</%text>
 

	
 
<%text>## Name of session cookie. Should be unique for a given host and path, even when running</%text>
 
<%text>## on different ports. Otherwise, cookie sessions will be shared and messed up.</%text>
 
beaker.session.key = kallithea
 
<%text>## Sessions should always only be accessible by the browser, not directly by JavaScript.</%text>
 
beaker.session.httponly = true
 
<%text>## Session lifetime. 2592000 seconds is 30 days.</%text>
 
beaker.session.timeout = 2592000
 

	
 
<%text>## Server secret used with HMAC to ensure integrity of cookies.</%text>
 
beaker.session.secret = ${uuid()}
 
<%text>## Further, encrypt the data with AES.</%text>
 
#beaker.session.encrypt_key = <key_for_encryption>
 
#beaker.session.validate_key = <validation_key>
 

	
 
<%text>## Type of storage used for the session, current types are</%text>
 
<%text>## dbm, file, memcached, database, and memory.</%text>
 

	
 
<%text>## File system storage of session data. (default)</%text>
 
#beaker.session.type = file
 

	
 
<%text>## Cookie only, store all session data inside the cookie. Requires secure secrets.</%text>
 
#beaker.session.type = cookie
 

	
 
<%text>## Database storage of session data.</%text>
 
#beaker.session.type = ext:database
 
#beaker.session.sa.url = postgresql://postgres:qwe@localhost/kallithea
 
#beaker.session.table_name = db_session
 

	
 
%if error_aggregation_service == 'errormator':
 
<%text>############################</%text>
 
<%text>## ERROR HANDLING SYSTEMS ##</%text>
 
<%text>############################</%text>
 

	
 
<%text>####################</%text>
 
<%text>### [errormator] ###</%text>
 
<%text>####################</%text>
 

	
 
<%text>## Errormator is tailored to work with Kallithea, see</%text>
 
<%text>## http://errormator.com for details on how to obtain an account</%text>
 
<%text>## you must install python package `errormator_client` to make it work</%text>
 

	
 
<%text>## errormator enabled</%text>
 
errormator = false
 

	
 
errormator.server_url = https://api.errormator.com
 
errormator.api_key = YOUR_API_KEY
 

	
 
<%text>## TWEAK AMOUNT OF INFO SENT HERE</%text>
 

	
 
<%text>## enables 404 error logging (default False)</%text>
 
errormator.report_404 = false
 

	
 
<%text>## time in seconds after request is considered being slow (default 1)</%text>
 
errormator.slow_request_time = 1
 

	
 
<%text>## record slow requests in application</%text>
 
<%text>## (needs to be enabled for slow datastore recording and time tracking)</%text>
 
errormator.slow_requests = true
 

	
 
<%text>## enable hooking to application loggers</%text>
 
#errormator.logging = true
 

	
 
<%text>## minimum log level for log capture</%text>
 
#errormator.logging.level = WARNING
 

	
 
<%text>## send logs only from erroneous/slow requests</%text>
 
<%text>## (saves API quota for intensive logging)</%text>
 
errormator.logging_on_error = false
 

	
 
<%text>## list of additional keywords that should be grabbed from environ object</%text>
 
<%text>## can be string with comma separated list of words in lowercase</%text>
 
<%text>## (by default client will always send following info:</%text>
 
<%text>## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that</%text>
 
<%text>## start with HTTP*); this list can be extended with additional keywords here</%text>
 
errormator.environ_keys_whitelist =
 

	
 
<%text>## list of keywords that should be blanked from request object</%text>
 
<%text>## can be string with comma separated list of words in lowercase</%text>
 
<%text>## (by default client will always blank keys that contain following words</%text>
 
<%text>## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'</%text>
 
<%text>## this list can be extended with additional keywords set here</%text>
 
errormator.request_keys_blacklist =
 

	
 
<%text>## list of namespaces that should be ignored when gathering log entries</%text>
 
<%text>## can be string with comma separated list of namespaces</%text>
 
<%text>## (by default the client ignores own entries: errormator_client.client)</%text>
 
errormator.log_namespace_blacklist =
 

	
 
%elif error_aggregation_service == 'sentry':
 
<%text>################</%text>
 
<%text>### [sentry] ###</%text>
 
<%text>################</%text>
 

	
 
<%text>## sentry is an alternative open source error aggregator</%text>
 
<%text>## you must install python packages `sentry` and `raven` to enable</%text>
 

	
 
sentry.dsn = YOUR_DSN
 
sentry.servers =
 
sentry.name =
 
sentry.key =
 
sentry.public_key =
 
sentry.secret_key =
 
sentry.project =
 
sentry.site =
 
sentry.include_paths =
 
sentry.exclude_paths =
 

	
 
%endif
 
<%text>################################################################################</%text>
 
<%text>## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED IN A PRODUCTION ENVIRONMENT*  ##</%text>
 
<%text>## Debug mode will enable the interactive debugging tool, allowing ANYONE to  ##</%text>
 
<%text>## execute malicious code after an exception is raised.                       ##</%text>
 
<%text>################################################################################</%text>
 
set debug = false
 

	
 
<%text>##################################</%text>
 
<%text>###       LOGVIEW CONFIG       ###</%text>
 
<%text>##################################</%text>
 

	
 
logview.sqlalchemy = #faa
 
logview.pylons.templating = #bfb
 
logview.pylons.util = #eee
 

	
 
<%text>#########################################################</%text>
 
<%text>### DB CONFIGS - EACH DB WILL HAVE ITS OWN CONFIG     ###</%text>
 
<%text>#########################################################</%text>
 

	
 
%if database_engine == 'sqlite':
 
# SQLITE [default]
 
sqlalchemy.db1.url = sqlite:///${here}/kallithea.db?timeout=60
 

	
 
%elif database_engine == 'postgres':
 
# POSTGRESQL
 
sqlalchemy.db1.url = postgresql://user:pass@localhost/kallithea
 

	
 
%elif database_engine == 'mysql':
 
# MySQL
 
sqlalchemy.db1.url = mysql://user:pass@localhost/kallithea
 

	
 
%endif
 
# see sqlalchemy docs for others
 

	
 
sqlalchemy.db1.echo = false
 
sqlalchemy.db1.pool_recycle = 3600
 
sqlalchemy.db1.convert_unicode = true
 

	
 
<%text>################################</%text>
 
<%text>### LOGGING CONFIGURATION   ####</%text>
 
<%text>################################</%text>
 

	
 
[loggers]
 
keys = root, routes, kallithea, sqlalchemy, beaker, templates, whoosh_indexer
 

	
 
[handlers]
 
keys = console, console_sql
 

	
 
[formatters]
 
keys = generic, color_formatter, color_formatter_sql
 

	
 
<%text>#############</%text>
 
<%text>## LOGGERS ##</%text>
 
<%text>#############</%text>
 

	
 
[logger_root]
 
level = NOTSET
 
handlers = console
 

	
 
[logger_routes]
 
level = DEBUG
 
handlers =
 
qualname = routes.middleware
 
<%text>## "level = DEBUG" logs the route matched and routing variables.</%text>
 
propagate = 1
 

	
 
[logger_beaker]
 
level = DEBUG
 
handlers =
 
qualname = beaker.container
 
propagate = 1
 

	
 
[logger_templates]
 
level = INFO
 
handlers =
 
qualname = pylons.templating
 
propagate = 1
 

	
 
[logger_kallithea]
 
level = DEBUG
 
handlers =
 
qualname = kallithea
 
propagate = 1
 

	
 
[logger_sqlalchemy]
 
level = INFO
 
handlers = console_sql
 
qualname = sqlalchemy.engine
 
propagate = 0
 

	
 
[logger_whoosh_indexer]
 
level = DEBUG
 
handlers =
 
qualname = whoosh_indexer
 
propagate = 1
 

	
 
<%text>##############</%text>
 
<%text>## HANDLERS ##</%text>
 
<%text>##############</%text>
 

	
 
[handler_console]
 
class = StreamHandler
 
args = (sys.stderr,)
 
level = INFO
 
formatter = generic
 

	
 
[handler_console_sql]
 
class = StreamHandler
 
args = (sys.stderr,)
 
level = WARN
 
formatter = generic
 

	
 
<%text>################</%text>
 
<%text>## FORMATTERS ##</%text>
 
<%text>################</%text>
 

	
 
[formatter_generic]
 
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
 

	
 
[formatter_color_formatter]
 
class = kallithea.lib.colored_formatter.ColorFormatter
 
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
 

	
 
[formatter_color_formatter_sql]
 
class = kallithea.lib.colored_formatter.ColorFormatterSql
 
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
kallithea/config/deployment.ini_tmpl
Show inline comments
 
################################################################################
 
################################################################################
 
# Kallithea - Example config                                                   #
 
#                                                                              #
 
# The %(here)s variable will be replaced with the parent directory of this file#
 
################################################################################
 
################################################################################
 

	
 
[DEFAULT]
 
debug = true
 
pdebug = false
 

	
 
################################################################################
 
## Email settings                                                             ##
 
##                                                                            ##
 
## Refer to the documentation ("Email settings") for more details.            ##
 
##                                                                            ##
 
## It is recommended to use a valid sender address that passes access         ##
 
## validation and spam filtering in mail servers.                             ##
 
################################################################################
 

	
 
## 'From' header for application emails. You can optionally add a name.
 
## Default:
 
#app_email_from = Kallithea
 
## Examples:
 
#app_email_from = Kallithea <kallithea-noreply@example.com>
 
#app_email_from = kallithea-noreply@example.com
 

	
 
## Subject prefix for application emails.
 
## A space between this prefix and the real subject is automatically added.
 
## Default:
 
#email_prefix =
 
## Example:
 
#email_prefix = [Kallithea]
 

	
 
## Recipients for error emails and fallback recipients of application mails.
 
## Multiple addresses can be specified, space-separated.
 
## Only addresses are allowed, do not add any name part.
 
## Default:
 
#email_to =
 
## Examples:
 
#email_to = admin@example.com
 
#email_to = admin@example.com another_admin@example.com
 

	
 
## 'From' header for error emails. You can optionally add a name.
 
## Default:
 
#error_email_from = pylons@yourapp.com
 
## Examples:
 
#error_email_from = Kallithea Errors <kallithea-noreply@example.com>
 
#error_email_from = paste_error@example.com
 

	
 
## SMTP server settings
 
## Only smtp_server is mandatory. All other settings take the specified default
 
## values.
 
#smtp_server = mail.server.com
 
#smtp_server = smtp.example.com
 
#smtp_username =
 
#smtp_password =
 
#smtp_port = 25
 
#smtp_use_tls = false
 
#smtp_use_ssl = false
 
## SMTP authentication parameters to use (e.g. LOGIN PLAIN CRAM-MD5, etc.).
 
## If empty, use any of the authentication parameters supported by the server.
 
#smtp_auth =
 

	
 
[server:main]
 
## PASTE ##
 
#use = egg:Paste#http
 
## nr of worker threads to spawn
 
#threadpool_workers = 5
 
## max request before thread respawn
 
#threadpool_max_requests = 10
 
## option to use threads instead of processes
 
#use_threadpool = true
 

	
 
## WAITRESS ##
 
use = egg:waitress#main
 
## number of worker threads
 
threads = 5
 
## MAX BODY SIZE 100GB
 
max_request_body_size = 107374182400
 
## use poll instead of select, fixes fd limits, may not work on old
 
## windows systems.
 
#asyncore_use_poll = True
 

	
 
## GUNICORN ##
 
#use = egg:gunicorn#main
 
## number of process workers. You must set `instance_id = *` when this option
 
## is set to more than one worker
 
#workers = 1
 
## process name
 
#proc_name = kallithea
 
## type of worker class, one of sync, eventlet, gevent, tornado
 
## for bigger setups, using a worker class other than sync is recommended
 
#worker_class = sync
 
#max_requests = 1000
 
## amount of time a worker can handle a request before it gets killed and
 
## restarted
 
#timeout = 3600
 

	
 
## UWSGI ##
 
## run with uwsgi --ini-paste-logged <inifile.ini>
 
#[uwsgi]
 
#socket = /tmp/uwsgi.sock
 
#master = true
 
#http = 127.0.0.1:5000
 

	
 
## run as a daemon and redirect all output to a file
 
#daemonize = ./uwsgi_kallithea.log
 

	
 
## master process PID
 
#pidfile = ./uwsgi_kallithea.pid
 

	
 
## stats server with workers statistics, use uwsgitop
 
## for monitoring, `uwsgitop 127.0.0.1:1717`
 
#stats = 127.0.0.1:1717
 
#memory-report = true
 

	
 
## log 5XX errors
 
#log-5xx = true
 

	
 
## Set the socket listen queue size.
 
#listen = 256
 

	
 
## Gracefully Reload workers after the specified amount of managed requests
 
## (avoid memory leaks).
 
#max-requests = 1000
 

	
 
## enable large buffers
 
#buffer-size = 65535
 

	
 
## socket and http timeouts ##
 
#http-timeout = 3600
 
#socket-timeout = 3600
 

	
 
## Log requests slower than the specified number of milliseconds.
 
#log-slow = 10
 

	
 
## Exit if no app can be loaded.
 
#need-app = true
 

	
 
## Set lazy mode (load apps in workers instead of master).
 
#lazy = true
 

	
 
## scaling ##
 
## set cheaper algorithm to use, if not set default will be used
 
#cheaper-algo = spare
 

	
 
## minimum number of workers to keep at all times
 
#cheaper = 1
 

	
 
## number of workers to spawn at startup
 
#cheaper-initial = 1
 

	
 
## maximum number of workers that can be spawned
 
#workers = 4
 

	
 
## how many workers should be spawned at a time
 
#cheaper-step = 1
 

	
 
## COMMON ##
 
host = 127.0.0.1
 
port = 5000
 

	
 
## middleware for hosting the WSGI application under a URL prefix
 
#[filter:proxy-prefix]
 
#use = egg:PasteDeploy#prefix
 
#prefix = /<your-prefix>
 

	
 
[app:main]
 
use = egg:kallithea
 
## enable proxy prefix middleware
 
#filter-with = proxy-prefix
 

	
 
full_stack = true
 
static_files = true
 
## Available Languages:
 
## cs de fr hu ja nl_BE pl pt_BR ru sk zh_CN zh_TW
 
lang =
 
cache_dir = %(here)s/data
 
index_dir = %(here)s/data/index
 

	
 
## perform a full repository scan on each server start; this should be
 
## set to false after first startup, to allow faster server restarts.
 
initial_repo_scan = false
 

	
 
## uncomment and set this path to use archive download cache
 
archive_cache_dir = %(here)s/tarballcache
 

	
 
## change this to unique ID for security
 
app_instance_uuid = ${app_instance_uuid}
 

	
 
## cut off limit for large diffs (size in bytes)
 
cut_off_limit = 256000
 

	
 
## use cache version of scm repo everywhere
 
vcs_full_cache = true
 

	
 
## force https in Kallithea, fixes https redirects, assumes it's always https
 
force_https = false
 

	
 
## use Strict-Transport-Security headers
 
use_htsts = false
 

	
 
## number of commits stats will parse on each iteration
 
commit_parse_limit = 25
 

	
 
## path to git executable
 
git_path = git
 

	
 
## git rev filter option, --all is the default filter, if you need to
 
## hide all refs in changelog switch this to --branches --tags
 
#git_rev_filter = --branches --tags
 

	
 
## RSS feed options
 
rss_cut_off_limit = 256000
 
rss_items_per_page = 10
 
rss_include_diff = false
 

	
 
## options for showing and identifying changesets
 
show_sha_length = 12
 
show_revision_number = false
 

	
 
## gist URL alias, used to create nicer URLs for gists. This should be a
 
## URL that does rewrites to _admin/gists/<gistid>.
 
## example: http://gist.kallithea.server/{gistid}. Empty means use the internal
 
## Kallithea url, ie. http[s]://your.kallithea.server/_admin/gists/<gistid>
 
## example: http://gist.example.com/{gistid}. Empty means use the internal
 
## Kallithea url, ie. http[s]://kallithea.example.com/_admin/gists/<gistid>
 
gist_alias_url =
 

	
 
## white list of API-enabled controllers. This allows adding a list of
 
## controllers to which access will be enabled by api_key. eg: to enable
 
## api access to raw_files put `FilesController:raw`, to enable access to patches
 
## add `ChangesetController:changeset_patch`. This list should be "," separated
 
## Syntax is <ControllerClass>:<function>. Check debug logs for generated names
 
## Recommended settings below are commented out:
 
api_access_controllers_whitelist =
 
#    ChangesetController:changeset_patch,
 
#    ChangesetController:changeset_raw,
 
#    FilesController:raw,
 
#    FilesController:archivefile
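## As an aside, the <ControllerClass>:<function> syntax above maps onto a plain
## comma-separated parse; the following hypothetical Python sketch (not part of
## Kallithea) only illustrates the format described in the comment:

# Hypothetical sketch illustrating the whitelist format documented above.
def parse_api_whitelist(raw):
    # split a ','-separated list of <ControllerClass>:<function> entries
    entries = []
    for item in raw.split(','):
        item = item.strip()
        if not item:
            continue
        controller, _, function = item.partition(':')
        entries.append((controller, function))
    return entries

# parse_api_whitelist('ChangesetController:changeset_patch, FilesController:raw')
# -> [('ChangesetController', 'changeset_patch'), ('FilesController', 'raw')]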
 

	
 
## default encoding used to convert from and to unicode
 
## can also be a comma-separated list of encodings in case of mixed encodings
 
default_encoding = utf8
 

	
 
## issue tracker for Kallithea (leave blank to disable, absent for default)
 
#bugtracker = https://bitbucket.org/conservancy/kallithea/issues
 

	
 
## issue tracking mapping for commits messages
 
## comment out issue_pat, issue_server_link, issue_prefix to disable
 

	
 
## pattern to get the issues from commit messages
 
## the default used here is #<number> with a non-capturing regex group for `#`
 
## {id} will be all groups matched from this pattern
 

	
 
issue_pat = (?:\s*#)(\d+)
 

	
 
## server URL to the issue; each {id} will be replaced with the match
 
## fetched from the regex, {repo} is replaced with the full repository name
 
## (including groups), and {repo_name} is replaced with just the name of the repo
 

	
 
issue_server_link = https://myissueserver.com/{repo}/issue/{id}
 
issue_server_link = https://issues.example.com/{repo}/issue/{id}
 

	
 
## prefix to add to the link to indicate it's a URL
 
## #314 will be replaced by <issue_prefix><id>
 

	
 
issue_prefix = #
 

	
 
## issue_pat, issue_server_link, issue_prefix can have suffixes to specify
 
## multiple patterns for other issue servers, wikis, or others
 
## below is an example of how to create a wiki pattern
 
# wiki-some-id -> https://mywiki.com/some-id
 
# wiki-some-id -> https://wiki.example.com/some-id
 

	
 
#issue_pat_wiki = (?:wiki-)(.+)
 
#issue_server_link_wiki = https://mywiki.com/{id}
 
#issue_server_link_wiki = https://wiki.example.com/{id}
 
#issue_prefix_wiki = WIKI-
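## To make the interplay of issue_pat, issue_server_link and issue_prefix
## concrete, here is a minimal, hypothetical Python sketch of the rewriting
## they describe (Kallithea's actual helper code differs in detail; the
## pattern and URL values are the examples from above):

import re

issue_pat = r'(?:\s*#)(\d+)'
issue_server_link = 'https://issues.example.com/{repo}/issue/{id}'
issue_prefix = '#'

def link_issues(message, repo='group/repo'):
    # replace each "#<number>" with a link built from issue_server_link
    def repl(match):
        issue_id = match.group(1)
        url = issue_server_link.replace('{repo}', repo).replace('{id}', issue_id)
        return ' <a href="%s">%s%s</a>' % (url, issue_prefix, issue_id)
    return re.sub(issue_pat, repl, message)

# link_issues('fix bug #123') ->
# 'fix bug <a href="https://issues.example.com/group/repo/issue/123">#123</a>'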
 

	
 
## instance-id prefix
 
## a prefix key for this instance used for cache invalidation when running
 
## multiple instances of kallithea, make sure it's globally unique for
 
## all running kallithea instances. Leave empty if you don't use it
 
instance_id =
 

	
 
## alternative return HTTP header for failed authentication. Default HTTP
 
## response is 401 HTTPUnauthorized. Currently Mercurial clients have trouble
 
## handling that. Set this variable to 403 to return HTTPForbidden
 
auth_ret_code =
 

	
 
## locking return code. When repository is locked return this HTTP code. 2XX
 
## codes don't break the transactions while 4XX codes do
 
lock_ret_code = 423
 

	
 
## allows changing the repository location on the settings page
 
allow_repo_location_change = True
 

	
 
## allows setting up custom hooks on the settings page
 
allow_custom_hooks_settings = True
 

	
 
####################################
 
###        CELERY CONFIG        ####
 
####################################
 

	
 
use_celery = false
 
broker.host = localhost
 
broker.vhost = rabbitmqhost
 
broker.port = 5672
 
broker.user = rabbitmq
 
broker.password = qweqwe
 

	
 
celery.imports = kallithea.lib.celerylib.tasks
 

	
 
celery.result.backend = amqp
 
celery.result.dburi = amqp://
 
celery.result.serialier = json
 

	
 
#celery.send.task.error.emails = true
 
#celery.amqp.task.result.expires = 18000
 

	
 
celeryd.concurrency = 2
 
#celeryd.log.file = celeryd.log
 
celeryd.log.level = DEBUG
 
celeryd.max.tasks.per.child = 1
 

	
 
## tasks will never be sent to the queue, but executed locally instead.
 
celery.always.eager = false
 

	
 
####################################
 
###         BEAKER CACHE        ####
 
####################################
 

	
 
beaker.cache.data_dir = %(here)s/data/cache/data
 
beaker.cache.lock_dir = %(here)s/data/cache/lock
 

	
 
beaker.cache.regions = short_term,long_term,sql_cache_short
 

	
 
beaker.cache.short_term.type = memory
 
beaker.cache.short_term.expire = 60
 
beaker.cache.short_term.key_length = 256
 

	
 
beaker.cache.long_term.type = memory
 
beaker.cache.long_term.expire = 36000
 
beaker.cache.long_term.key_length = 256
 

	
 
beaker.cache.sql_cache_short.type = memory
 
beaker.cache.sql_cache_short.expire = 10
 
beaker.cache.sql_cache_short.key_length = 256
 

	
 
####################################
 
###       BEAKER SESSION        ####
 
####################################
 

	
 
## Name of session cookie. Should be unique for a given host and path, even when running
 
## on different ports. Otherwise, cookie sessions will be shared and messed up.
 
beaker.session.key = kallithea
 
## Sessions should always only be accessible by the browser, not directly by JavaScript.
 
beaker.session.httponly = true
 
## Session lifetime. 2592000 seconds is 30 days.
 
beaker.session.timeout = 2592000
 

	
 
## Server secret used with HMAC to ensure integrity of cookies.
 
beaker.session.secret = ${app_instance_uuid}
 
## Further, encrypt the data with AES.
 
#beaker.session.encrypt_key = <key_for_encryption>
 
#beaker.session.validate_key = <validation_key>
 

	
 
## Type of storage used for the session, current types are
 
## dbm, file, memcached, database, and memory.
 

	
 
## File system storage of session data. (default)
 
#beaker.session.type = file
 

	
 
## Cookie only, store all session data inside the cookie. Requires secure secrets.
 
#beaker.session.type = cookie
 

	
 
## Database storage of session data.
 
#beaker.session.type = ext:database
 
#beaker.session.sa.url = postgresql://postgres:qwe@localhost/kallithea
 
#beaker.session.table_name = db_session
 

	
 
############################
 
## ERROR HANDLING SYSTEMS ##
 
############################
 

	
 
####################
 
### [errormator] ###
 
####################
 

	
 
## Errormator is tailored to work with Kallithea, see
 
## http://errormator.com for details on how to obtain an account
 
## you must install python package `errormator_client` to make it work
 

	
 
## errormator enabled
 
errormator = false
 

	
 
errormator.server_url = https://api.errormator.com
 
errormator.api_key = YOUR_API_KEY
 

	
 
## TWEAK AMOUNT OF INFO SENT HERE
 

	
 
## enables 404 error logging (default False)
 
errormator.report_404 = false
 

	
 
## time in seconds after request is considered being slow (default 1)
 
errormator.slow_request_time = 1
 

	
 
## record slow requests in application
 
## (needs to be enabled for slow datastore recording and time tracking)
 
errormator.slow_requests = true
 

	
 
## enable hooking to application loggers
 
#errormator.logging = true
 

	
 
## minimum log level for log capture
 
#errormator.logging.level = WARNING
 

	
 
## send logs only from erroneous/slow requests
 
## (saves API quota for intensive logging)
 
errormator.logging_on_error = false
 

	
 
## list of additional keywords that should be grabbed from environ object
 
## can be string with comma separated list of words in lowercase
 
## (by default client will always send following info:
 
## 'REMOTE_USER', 'REMOTE_ADDR', 'SERVER_NAME', 'CONTENT_TYPE' + all keys that
 
## start with HTTP*); this list can be extended with additional keywords here
 
errormator.environ_keys_whitelist =
 

	
 
## list of keywords that should be blanked from request object
 
## can be string with comma separated list of words in lowercase
 
## (by default client will always blank keys that contain following words
 
## 'password', 'passwd', 'pwd', 'auth_tkt', 'secret', 'csrf'
 
## this list can be extended with additional keywords set here
 
errormator.request_keys_blacklist =
 

	
 
## list of namespaces that should be ignored when gathering log entries
 
## can be string with comma separated list of namespaces
 
## (by default the client ignores own entries: errormator_client.client)
 
errormator.log_namespace_blacklist =
 

	
 
################
 
### [sentry] ###
 
################
 

	
 
## sentry is an alternative open source error aggregator
 
## you must install python packages `sentry` and `raven` to enable
 

	
 
sentry.dsn = YOUR_DSN
 
sentry.servers =
 
sentry.name =
 
sentry.key =
 
sentry.public_key =
 
sentry.secret_key =
 
sentry.project =
 
sentry.site =
 
sentry.include_paths =
 
sentry.exclude_paths =
 

	
 
################################################################################
 
## WARNING: *THE LINE BELOW MUST BE UNCOMMENTED IN A PRODUCTION ENVIRONMENT*  ##
 
## Debug mode will enable the interactive debugging tool, allowing ANYONE to  ##
 
## execute malicious code after an exception is raised.                       ##
 
################################################################################
 
set debug = false
 

	
 
##################################
 
###       LOGVIEW CONFIG       ###
 
##################################
 

	
 
logview.sqlalchemy = #faa
 
logview.pylons.templating = #bfb
 
logview.pylons.util = #eee
 

	
 
#########################################################
 
### DB CONFIGS - EACH DB WILL HAVE ITS OWN CONFIG     ###
 
#########################################################
 

	
 
# SQLITE [default]
 
sqlalchemy.db1.url = sqlite:///%(here)s/kallithea.db?timeout=60
 

	
 
# POSTGRESQL
 
#sqlalchemy.db1.url = postgresql://user:pass@localhost/kallithea
 

	
 
# MySQL
 
#sqlalchemy.db1.url = mysql://user:pass@localhost/kallithea
 

	
 
# see sqlalchemy docs for others
 

	
 
sqlalchemy.db1.echo = false
 
sqlalchemy.db1.pool_recycle = 3600
 
sqlalchemy.db1.convert_unicode = true
 

	
 
################################
 
### LOGGING CONFIGURATION   ####
 
################################
 

	
 
[loggers]
 
keys = root, routes, kallithea, sqlalchemy, beaker, templates, whoosh_indexer
 

	
 
[handlers]
 
keys = console, console_sql
 

	
 
[formatters]
 
keys = generic, color_formatter, color_formatter_sql
 

	
 
#############
 
## LOGGERS ##
 
#############
 

	
 
[logger_root]
 
level = NOTSET
 
handlers = console
 

	
 
[logger_routes]
 
level = DEBUG
 
handlers =
 
qualname = routes.middleware
 
## "level = DEBUG" logs the route matched and routing variables.
 
propagate = 1
 

	
 
[logger_beaker]
 
level = DEBUG
 
handlers =
 
qualname = beaker.container
 
propagate = 1
 

	
 
[logger_templates]
 
level = INFO
 
handlers =
 
qualname = pylons.templating
 
propagate = 1
 

	
 
[logger_kallithea]
 
level = DEBUG
 
handlers =
 
qualname = kallithea
 
propagate = 1
 

	
 
[logger_sqlalchemy]
 
level = INFO
 
handlers = console_sql
 
qualname = sqlalchemy.engine
 
propagate = 0
 

	
 
[logger_whoosh_indexer]
 
level = DEBUG
 
handlers =
 
qualname = whoosh_indexer
 
propagate = 1
 

	
 
##############
 
## HANDLERS ##
 
##############
 

	
 
[handler_console]
 
class = StreamHandler
 
args = (sys.stderr,)
 
level = INFO
 
formatter = generic
 

	
 
[handler_console_sql]
 
class = StreamHandler
 
args = (sys.stderr,)
 
level = WARN
 
formatter = generic
 

	
 
################
 
## FORMATTERS ##
 
################
 

	
 
[formatter_generic]
 
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
 

	
 
[formatter_color_formatter]
 
class = kallithea.lib.colored_formatter.ColorFormatter
 
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
 

	
 
[formatter_color_formatter_sql]
 
class = kallithea.lib.colored_formatter.ColorFormatterSql
 
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
 
datefmt = %Y-%m-%d %H:%M:%S
kallithea/lib/rcmail/response.py
Show inline comments
 
# The code in this module is entirely lifted from the Lamson project
 
# (http://lamsonproject.org/).  Its copyright is:
 

	
 
# Copyright (c) 2008, Zed A. Shaw
 
# All rights reserved.
 

	
 
# It is provided under this license:
 

	
 
# Redistribution and use in source and binary forms, with or without
 
# modification, are permitted provided that the following conditions are met:
 

	
 
# * Redistributions of source code must retain the above copyright notice, this
 
#   list of conditions and the following disclaimer.
 

	
 
# * Redistributions in binary form must reproduce the above copyright notice,
 
#   this list of conditions and the following disclaimer in the documentation
 
#   and/or other materials provided with the distribution.
 

	
 
# * Neither the name of the Zed A. Shaw nor the names of its contributors may
 
#   be used to endorse or promote products derived from this software without
 
#   specific prior written permission.
 

	
 
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
 
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 
# POSSIBILITY OF SUCH DAMAGE.
 

	
 
import os
 
import mimetypes
 
import string
 
from email import encoders
 
from email.charset import Charset
 
from email.utils import parseaddr
 
from email.mime.base import MIMEBase
 

	
 
ADDRESS_HEADERS_WHITELIST = ['From', 'To', 'Delivered-To', 'Cc']
 
DEFAULT_ENCODING = "utf-8"
 
VALUE_IS_EMAIL_ADDRESS = lambda v: '@' in v
 

	
 

	
 
def normalize_header(header):
 
    return string.capwords(header.lower(), '-')
 

	
 

	
 
class EncodingError(Exception):
 
    """Thrown when there is an encoding error."""
 
    pass
 

	
 

	
 
class MailBase(object):
 
    """MailBase is used as the basis of lamson.mail and contains the basics of
 
    encoding an email.  You actually can do all your email processing with this
 
    class, but it's more raw.
 
    """
 
    def __init__(self, items=()):
 
        self.headers = dict(items)
 
        self.parts = []
 
        self.body = None
 
        self.content_encoding = {'Content-Type': (None, {}),
 
                                 'Content-Disposition': (None, {}),
 
                                 'Content-Transfer-Encoding': (None, {})}
 

	
 
    def __getitem__(self, key):
 
        return self.headers.get(normalize_header(key), None)
 

	
 
    def __len__(self):
 
        return len(self.headers)
 

	
 
    def __iter__(self):
 
        return iter(self.headers)
 

	
 
    def __contains__(self, key):
 
        return normalize_header(key) in self.headers
 

	
 
    def __setitem__(self, key, value):
 
        self.headers[normalize_header(key)] = value
 

	
 
    def __delitem__(self, key):
 
        del self.headers[normalize_header(key)]
 

	
 
    def __nonzero__(self):
 
        return self.body is not None or len(self.headers) > 0 or len(self.parts) > 0
 

	
 
    def keys(self):
 
        """Returns the sorted keys."""
 
        return sorted(self.headers.keys())
 

	
 
    def attach_file(self, filename, data, ctype, disposition):
 
        """
 
        A file attachment is a raw attachment with a disposition that
 
        indicates the file name.
 
        """
 
        assert filename, "You can't attach a file without a filename."
 
        ctype = ctype.lower()
 

	
 
        part = MailBase()
 
        part.body = data
 
        part.content_encoding['Content-Type'] = (ctype, {'name': filename})
 
        part.content_encoding['Content-Disposition'] = (disposition,
 
                                                        {'filename': filename})
 
        self.parts.append(part)
 

	
 
    def attach_text(self, data, ctype):
 
        """
 
        This attaches a simpler text encoded part, which doesn't have a
 
        filename.
 
        """
 
        ctype = ctype.lower()
 

	
 
        part = MailBase()
 
        part.body = data
 
        part.content_encoding['Content-Type'] = (ctype, {})
 
        self.parts.append(part)
 

	
 
    def walk(self):
 
        for p in self.parts:
 
            yield p
 
            for x in p.walk():
 
                yield x
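
# A brief usage sketch of MailBase, based only on the methods defined above
# (addresses and contents are illustrative):
base = MailBase([('To', 'user@example.com'), ('From', 'kallithea-noreply@example.com')])
base.attach_text('hello', 'text/plain')
base.attach_file('report.txt', 'report contents', 'text/plain', 'attachment')
for part in base.walk():
    print part.content_encoding['Content-Type']
# -> ('text/plain', {}) and ('text/plain', {'name': 'report.txt'})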
 

	
 

	
 
class MailResponse(object):
 
    """
 
    You are given MailResponse objects from the lamson.view methods, and
 
    whenever you want to generate an email to send to someone.  It has the
 
    same basic functionality as MailRequest, but it is designed to be written
 
    to, rather than read from (although you can do both).
 

	
 
    You can easily set a Body or Html during creation or after by passing it
 
    as __init__ parameters, or by setting those attributes.
 

	
 
    You can initially set the From, To, and Subject, but they are headers so
 
    use the dict notation to change them: msg['From'] = 'joe@test.com'.
 
    use the dict notation to change them: msg['From'] = 'joe@example.com'.
 

	
 
    The message is not fully crafted until right when you convert it with
 
    MailResponse.to_message.  This lets you change it and work with it, then
 
    send it out when it's ready.
 
    """
 
    def __init__(self, To=None, From=None, Subject=None, Body=None, Html=None,
 
                 separator="; "):
 
        self.Body = Body
 
        self.Html = Html
 
        self.base = MailBase([('To', To), ('From', From), ('Subject', Subject)])
 
        self.multipart = self.Body and self.Html
 
        self.attachments = []
 
        self.separator = separator
 

	
 
    def __contains__(self, key):
 
        return self.base.__contains__(key)
 

	
 
    def __getitem__(self, key):
 
        return self.base.__getitem__(key)
 

	
 
    def __setitem__(self, key, val):
 
        return self.base.__setitem__(key, val)
 

	
 
    def __delitem__(self, name):
 
        del self.base[name]
 

	
 
    def attach(self, filename=None, content_type=None, data=None,
 
               disposition=None):
 
        """
 

	
 
        Simplifies attaching files from disk or data as files.  To attach
 
        simple text, simply give data and a content_type.  To attach a file,
 
        give the data/content_type/filename/disposition combination.
 

	
 
        For convenience, if you don't give data and only a filename, then it
 
        will read that file's contents when you call to_message() later.  If
 
        you give data and filename then it will assume you've filled data
 
        with what the file's contents are and filename is just the name to
 
        use.
 
        """
 

	
 
        assert filename or data, ("You must give a filename or some data to "
 
                                  "attach.")
 
        assert data or os.path.exists(filename), ("File doesn't exist, and no "
 
                                                  "data given.")
 

	
 
        self.multipart = True
 

	
 
        if filename and not content_type:
 
            content_type, encoding = mimetypes.guess_type(filename)
 

	
 
        assert content_type, ("No content type given, and couldn't guess "
 
                              "from the filename: %r" % filename)
 

	
 
        self.attachments.append({'filename': filename,
 
                                 'content_type': content_type,
 
                                 'data': data,
 
                                 'disposition': disposition,})
 

	
 
    def attach_part(self, part):
 
        """
 
        Attaches a raw MailBase part from a MailRequest (or anywhere)
 
        so that you can copy it over.
 
        """
 
        self.multipart = True
 

	
 
        self.attachments.append({'filename': None,
 
                                 'content_type': None,
 
                                 'data': None,
 
                                 'disposition': None,
 
                                 'part': part,
 
                                 })
 

	
 
    def attach_all_parts(self, mail_request):
 
        """
 
        Used for copying the attachment parts of a mail.MailRequest
 
        object for mailing lists that need to maintain attachments.
 
        """
 
        for part in mail_request.all_parts():
 
            self.attach_part(part)
 

	
 
        self.base.content_encoding = mail_request.base.content_encoding.copy()
 

	
 
    def clear(self):
 
        """
 
        Clears out the attachments so you can redo them.  Use this to keep the
 
        headers for a series of different messages with different attachments.
 
        """
 
        del self.attachments[:]
 
        del self.base.parts[:]
 
        self.multipart = False
 

	
 
    def update(self, message):
 
        """
 
        Used to easily set a bunch of headers from another dict
 
        like object.
 
        """
 
        for k in message.keys():
 
            self.base[k] = message[k]
 

	
 
    def __str__(self):
 
        """
 
        Converts to a string.
 
        """
 
        return self.to_message().as_string()
 

	
 
    def _encode_attachment(self, filename=None, content_type=None, data=None,
 
                           disposition=None, part=None):
 
        """
 
        Used internally to take the attachments mentioned in self.attachments
 
        and do the actual encoding in a lazy way when you call to_message.
 
        """
 
        if part:
 
            self.base.parts.append(part)
 
        elif filename:
 
            if not data:
 
                data = open(filename).read()
 

	
 
            self.base.attach_file(filename, data, content_type,
 
                                  disposition or 'attachment')
 
        else:
 
            self.base.attach_text(data, content_type)
 

	
 
        ctype = self.base.content_encoding['Content-Type'][0]
 

	
 
        if ctype and not ctype.startswith('multipart'):
 
            self.base.content_encoding['Content-Type'] = ('multipart/mixed', {})
 

	
 
    def to_message(self):
 
        """
 
        Figures out all the required steps to finally craft the
 
        message you need and return it.  The resulting message
 
        is also available as a self.base attribute.
 

	
 
        What is returned is a Python email API message you can
 
        use with those APIs.  The self.base attribute is the raw
 
        lamson.encoding.MailBase.
 
        """
 
        del self.base.parts[:]
 

	
 
        if self.Body and self.Html:
 
            self.multipart = True
 
            self.base.content_encoding['Content-Type'] = (
 
                'multipart/alternative', {})
 

	
 
        if self.multipart:
 
            self.base.body = None
 
            if self.Body:
 
                self.base.attach_text(self.Body, 'text/plain')
 

	
 
            if self.Html:
 
                self.base.attach_text(self.Html, 'text/html')
 

	
 
            for args in self.attachments:
 
                self._encode_attachment(**args)
 

	
 
        elif self.Body:
 
            self.base.body = self.Body
 
            self.base.content_encoding['Content-Type'] = ('text/plain', {})
 

	
 
        elif self.Html:
 
            self.base.body = self.Html
 
            self.base.content_encoding['Content-Type'] = ('text/html', {})
 

	
 
        return to_message(self.base, separator=self.separator)
 

	
 
    def all_parts(self):
 
        """
 
        Returns all the encoded parts.  Only useful for debugging
 
        or inspecting after calling to_message().
 
        """
 
        return self.base.parts
 

	
 
    def keys(self):
 
        return self.base.keys()
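
# A short usage sketch of MailResponse, drawn from the methods above
# (all addresses and values are illustrative):
msg = MailResponse(To='user@example.com',
                   From='kallithea-noreply@example.com',
                   Subject='Test message',
                   Body='plain text body',
                   Html='<b>html body</b>')
msg['Cc'] = 'admin@example.com'        # headers use dict notation
msg.attach(filename='notes.txt', content_type='text/plain', data='some notes')
mime_message = msg.to_message()        # standard Python email message object
raw = str(msg)                         # full message text via as_string()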
 

	
 

	
 
def to_message(mail, separator="; "):
 
    """
 
    Given a MailBase message, this will construct a MIMEPart
 
    that is canonicalized for use with the Python email API.
 
    """
 
    ctype, params = mail.content_encoding['Content-Type']
 

	
 
    if not ctype:
 
        if mail.parts:
 
            ctype = 'multipart/mixed'
 
        else:
 
            ctype = 'text/plain'
 
    else:
 
        if mail.parts:
 
            assert ctype.startswith(("multipart", "message")), \
 
                   "Content type should be multipart or message, not %r" % ctype
 

	
 
    # adjust the content type according to what it should be now
 
    mail.content_encoding['Content-Type'] = (ctype, params)
 

	
 
    try:
 
        out = MIMEPart(ctype, **params)
 
    except TypeError as exc:  # pragma: no cover
 
        raise EncodingError("Content-Type malformed, not allowed: %r; "
 
                            "%r (Python ERROR: %s" %
 
                            (ctype, params, exc.message))
 

	
 
    for k in mail.keys():
 
        if k in ADDRESS_HEADERS_WHITELIST:
 
            out[k.encode('ascii')] = header_to_mime_encoding(
 
                                         mail[k],
 
                                         not_email=False,
 
                                         separator=separator
 
                                     )
 
        else:
 
            out[k.encode('ascii')] = header_to_mime_encoding(
 
                                         mail[k],
 
                                         not_email=True
 
                                    )
 

	
 
    out.extract_payload(mail)
 

	
 
    # go through the children
 
    for part in mail.parts:
 
        out.attach(to_message(part))
 

	
 
    return out
 

	
 

	
 
class MIMEPart(MIMEBase):
 
    """
 
    A reimplementation of nearly everything in email.mime to be more useful
 
    for actually attaching things.  Rather than one class for every type of
 
    thing you'd encode, there's just this one, and it figures out how to
 
    encode what you ask it.
 
    """
 
    def __init__(self, type, **params):
 
        self.maintype, self.subtype = type.split('/')
 
        MIMEBase.__init__(self, self.maintype, self.subtype, **params)
 

	
 
    def add_text(self, content):
 
        # this is text, so encode it in canonical form
 
        try:
 
            encoded = content.encode('ascii')
 
            charset = 'ascii'
 
        except UnicodeError:
 
            encoded = content.encode('utf-8')
 
            charset = 'utf-8'
 

	
 
        self.set_payload(encoded, charset=charset)
 

	
 
    def extract_payload(self, mail):
 
        if mail.body is None:
 
            return  # only None, '' is still ok
 

	
 
        ctype, ctype_params = mail.content_encoding['Content-Type']
 
        cdisp, cdisp_params = mail.content_encoding['Content-Disposition']
 

	
 
        assert ctype, ("Extract payload requires that mail.content_encoding "
 
                       "have a valid Content-Type.")
 

	
 
        if ctype.startswith("text/"):
 
            self.add_text(mail.body)
 
        else:
 
            if cdisp:
 
                # replicate the content-disposition settings
 
                self.add_header('Content-Disposition', cdisp, **cdisp_params)
 

	
 
            self.set_payload(mail.body)
 
            encoders.encode_base64(self)
 

	
 
    def __repr__(self):
 
        return "<MIMEPart '%s/%s': %r, %r, multipart=%r>" % (
 
            self.subtype,
 
            self.maintype,
 
            self['Content-Type'],
 
            self['Content-Disposition'],
 
            self.is_multipart())
 

	
 

	
 
def header_to_mime_encoding(value, not_email=False, separator=", "):
 
    if not value:
 
        return ""
 

	
 
    encoder = Charset(DEFAULT_ENCODING)
 
    if type(value) == list:
 
        return separator.join(properly_encode_header(
 
            v, encoder, not_email) for v in value)
 
    else:
 
        return properly_encode_header(value, encoder, not_email)
 

	
 

	
 
def properly_encode_header(value, encoder, not_email):
 
    """
 
    The only thing special (weird) about this function is that it tries
 
    to do a fast check to see if the header value has an email address in
 
    it.  Since random headers could have an email address, and email addresses
 
    have weird special formatting rules, we have to check for it.
 

	
 
    Normally this works fine, but in Librelist, we need to "obfuscate" email
 
    addresses by changing the '@' to '-AT-'.  This is where
 
    VALUE_IS_EMAIL_ADDRESS exists.  It's a simple lambda returning True/False
 
    to check if a header value has an email address.  If you need to make this
 
    check different, then change this.
 
    """
 
    try:
 
        return value.encode("ascii")
 
    except UnicodeEncodeError:
 
        if not not_email and VALUE_IS_EMAIL_ADDRESS(value):
 
            # this could have an email address, make sure we don't screw it up
 
            name, address = parseaddr(value)
 
            return '"%s" <%s>' % (
 
                encoder.header_encode(name.encode("utf-8")), address)
 

	
 
        return encoder.header_encode(value.encode("utf-8"))
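
# The address-aware behaviour described in the docstring above, shown with a
# couple of hypothetical calls (addresses are illustrative):
header_to_mime_encoding([u'a@example.com', u'b@example.com'])
# -> 'a@example.com, b@example.com'   (plain ASCII passes through, list joined)
header_to_mime_encoding(u'Ren\xe9 M\xfcller <rene@example.com>')
# -> '"=?utf-8?...?=" <rene@example.com>'   (encoded name, readable address)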
kallithea/tests/__init__.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
"""
 
Pylons application test package
 

	
 
This package assumes the Pylons environment is already loaded, such as
 
when this script is imported from the `nosetests --with-pylons=test.ini`
 
command.
 

	
 
This module initializes the application via ``websetup`` (`paster
 
setup-app`) and provides the base testing objects.
 

	
 
nosetests -x - fail on first error
 
nosetests kallithea.tests.functional.test_admin_settings:TestSettingsController.test_my_account
 
nosetests --pdb --pdb-failures
 
nosetests --with-coverage --cover-package=kallithea.model.validators kallithea.tests.test_validators
 

	
 
optional FLAGS:
 
    KALLITHEA_WHOOSH_TEST_DISABLE=1 - skip whoosh index building and tests
 
    KALLITHEA_NO_TMP_PATH=1 - disable new temp path for tests, used mostly for test_vcs_operations
 

	
 
"""
 
import os
 
import re
 
import time
 
import logging
 
import datetime
 
import hashlib
 
import tempfile
 
from os.path import join as jn
 

	
 
from tempfile import _RandomNameSequence
 

	
 
import pylons
 
import pylons.test
 
from pylons import config, url
 
from pylons.i18n.translation import _get_translator
 
from pylons.util import ContextObj
 

	
 
from routes.util import URLGenerator
 
from webtest import TestApp
 
from nose.plugins.skip import SkipTest
 

	
 
from kallithea.lib.compat import unittest
 
from kallithea import is_windows
 
from kallithea.model.db import Notification, User, UserNotification
 
from kallithea.model.meta import Session
 
from kallithea.tests.parameterized import parameterized
 
from kallithea.lib.utils2 import safe_str
 

	
 

	
 
os.environ['TZ'] = 'UTC'
 
if not is_windows:
 
    time.tzset()
 

	
 
log = logging.getLogger(__name__)
 

	
 
__all__ = [
 
    'parameterized', 'environ', 'url', 'TestController',
 
    'SkipTest', 'ldap_lib_installed', 'pam_lib_installed', 'BaseTestCase', 'init_stack',
 
    'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'NEW_HG_REPO', 'NEW_GIT_REPO',
 
    'HG_FORK', 'GIT_FORK', 'TEST_USER_ADMIN_LOGIN', 'TEST_USER_ADMIN_PASS',
 
    'TEST_USER_ADMIN_EMAIL', 'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS',
 
    'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN',
 
    'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO',
 
    'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO',
 
    'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'HG_REMOTE_REPO',
 
    'GIT_REMOTE_REPO', 'SCM_TESTS',
 
]
 

	
 
# Invoke websetup with the current config file
 
# SetupCommand('setup-app').run([config_file])
 

	
 
environ = {}
 

	
 
#SOME GLOBALS FOR TESTS
 

	
 
TESTS_TMP_PATH = jn('/', 'tmp', 'rc_test_%s' % _RandomNameSequence().next())
 
TEST_USER_ADMIN_LOGIN = 'test_admin'
 
TEST_USER_ADMIN_PASS = 'test12'
 
TEST_USER_ADMIN_EMAIL = 'test_admin@mail.com'
 
TEST_USER_ADMIN_EMAIL = 'test_admin@example.com'
 

	
 
TEST_USER_REGULAR_LOGIN = 'test_regular'
 
TEST_USER_REGULAR_PASS = 'test12'
 
TEST_USER_REGULAR_EMAIL = 'test_regular@mail.com'
 
TEST_USER_REGULAR_EMAIL = 'test_regular@example.com'
 

	
 
TEST_USER_REGULAR2_LOGIN = 'test_regular2'
 
TEST_USER_REGULAR2_PASS = 'test12'
 
TEST_USER_REGULAR2_EMAIL = 'test_regular2@mail.com'
 
TEST_USER_REGULAR2_EMAIL = 'test_regular2@example.com'
 

	
 
HG_REPO = 'vcs_test_hg'
 
GIT_REPO = 'vcs_test_git'
 

	
 
NEW_HG_REPO = 'vcs_test_hg_new'
 
NEW_GIT_REPO = 'vcs_test_git_new'
 

	
 
HG_FORK = 'vcs_test_hg_fork'
 
GIT_FORK = 'vcs_test_git_fork'
 

	
 
## VCS
 
SCM_TESTS = ['hg', 'git']
 
uniq_suffix = str(int(time.mktime(datetime.datetime.now().timetuple())))
 

	
 
GIT_REMOTE_REPO = 'git://github.com/codeinn/vcs.git'
 

	
 
TEST_GIT_REPO = jn(TESTS_TMP_PATH, GIT_REPO)
 
TEST_GIT_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcsgitclone%s' % uniq_suffix)
 
TEST_GIT_REPO_PULL = jn(TESTS_TMP_PATH, 'vcsgitpull%s' % uniq_suffix)
 

	
 

	
 
HG_REMOTE_REPO = 'http://bitbucket.org/marcinkuzminski/vcs'
 

	
 
TEST_HG_REPO = jn(TESTS_TMP_PATH, HG_REPO)
 
TEST_HG_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcshgclone%s' % uniq_suffix)
 
TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, 'vcshgpull%s' % uniq_suffix)
 

	
 
TEST_DIR = tempfile.gettempdir()
 
TEST_REPO_PREFIX = 'vcs-test'
 

	
 
# cached repos if any !
 
# comment out to get some other repos from bb or github
 
GIT_REMOTE_REPO = jn(TESTS_TMP_PATH, GIT_REPO)
 
HG_REMOTE_REPO = jn(TESTS_TMP_PATH, HG_REPO)
 

	
 
#skip ldap tests if LDAP lib is not installed
 
ldap_lib_installed = False
 
try:
 
    import ldap
 
    ldap.API_VERSION
 
    ldap_lib_installed = True
 
except ImportError:
 
    # means that python-ldap is not installed
 
    pass
 

	
 
try:
 
    import pam
 
    pam.PAM_TEXT_INFO
 
    pam_lib_installed = True
 
except ImportError:
 
    pam_lib_installed = False
 

	
 
import logging
 

	
 
class NullHandler(logging.Handler):
 
    def emit(self, record):
 
        pass
 

	
 
def init_stack(config=None):
 
    if not config:
 
        config = pylons.test.pylonsapp.config
 
    url._push_object(URLGenerator(config['routes.map'], environ))
 
    pylons.app_globals._push_object(config['pylons.app_globals'])
 
    pylons.config._push_object(config)
 
    pylons.tmpl_context._push_object(ContextObj())
 
    # Initialize a translator for tests that utilize i18n
 
    translator = _get_translator(pylons.config.get('lang'))
 
    pylons.translator._push_object(translator)
 
    h = NullHandler()
 
    logging.getLogger("kallithea").addHandler(h)
 

	
 

	
 
class BaseTestCase(unittest.TestCase):
 
    def __init__(self, *args, **kwargs):
 
        self.wsgiapp = pylons.test.pylonsapp
 
        init_stack(self.wsgiapp.config)
 
        unittest.TestCase.__init__(self, *args, **kwargs)
 

	
 
    def remove_all_notifications(self):
 
        Notification.query().delete()
 

	
 
        # Because query().delete() does not (by default) trigger cascades.
 
        # http://docs.sqlalchemy.org/en/rel_0_7/orm/collections.html#passive-deletes
 
        UserNotification.query().delete()
 

	
 
        Session().commit()
 

	
 

	
 
class TestController(BaseTestCase):
 

	
 
    def __init__(self, *args, **kwargs):
 
        BaseTestCase.__init__(self, *args, **kwargs)
 
        self.app = TestApp(self.wsgiapp)
 
        self.maxDiff = None
 
        self.index_location = config['app_conf']['index_dir']
 

	
 
    def log_user(self, username=TEST_USER_ADMIN_LOGIN,
 
                 password=TEST_USER_ADMIN_PASS):
 
        self._logged_username = username
 
        response = self.app.post(url(controller='login', action='index'),
 
                                 {'username': username,
 
                                  'password': password})
 

	
 
        if 'Invalid username or password' in response.body:
 
            self.fail('could not login using %s %s' % (username, password))
 

	
 
        self.assertEqual(response.status, '302 Found')
 
        self.assert_authenticated_user(response, username)
 

	
 
        response = response.follow()
 
        return response.session['authuser']
 

	
 
    def _get_logged_user(self):
 
        return User.get_by_username(self._logged_username)
 

	
 
    def assert_authenticated_user(self, response, expected_username):
 
        cookie = response.session.get('authuser')
 
        user = cookie and cookie.get('user_id')
 
        user = user and User.get(user)
 
        user = user and user.username
 
        self.assertEqual(user, expected_username)
 
        self.assertEqual(cookie.get('is_authenticated'), True)
 

	
 
    def authentication_token(self):
 
        return self.app.get(url('authentication_token')).body
 

	
 
    def checkSessionFlash(self, response, msg=None, skip=0, _matcher=lambda msg, m: msg in m):
 
        if 'flash' not in response.session:
 
            self.fail(safe_str(u'msg `%s` not found - session has no flash:\n%s' % (msg, response)))
 
        try:
 
            level, m = response.session['flash'][-1 - skip]
 
            if _matcher(msg, m):
 
                return
 
        except IndexError:
 
            pass
 
        self.fail(safe_str(u'msg `%s` not found in session flash (skipping %s): %s' %
 
                           (msg, skip,
 
                            ', '.join('`%s`' % m for level, m in response.session['flash']))))
 

	
 
    def checkSessionFlashRegex(self, response, regex, skip=0):
 
        self.checkSessionFlash(response, regex, skip=skip, _matcher=re.search)
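
# A sketch of how a functional test builds on the helpers above (the class
# name and route are illustrative; the real tests live under
# kallithea/tests/functional):
class TestExampleController(TestController):
    def test_index_as_admin(self):
        # log in with the default admin test account defined above
        self.log_user()
        response = self.app.get(url(controller='home', action='index'))
        self.assertEqual(response.status, '200 OK')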
kallithea/tests/api/api_base.py
Show inline comments
 
# -*- coding: utf-8 -*-
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU General Public License for more details.
 
#
 
# You should have received a copy of the GNU General Public License
 
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
 

	
 
"""
 
tests for api. run with::
 

	
 
    KALLITHEA_WHOOSH_TEST_DISABLE=1 nosetests --with-coverage --cover-package=kallithea.controllers.api.api -x kallithea/tests/api
 
"""
 

	
 
import os
 
import random
 
import mock
 

	
 
from kallithea.tests import *
 
from kallithea.tests.fixture import Fixture
 
from kallithea.lib.compat import json
 
from kallithea.lib.auth import AuthUser
 
from kallithea.model.user import UserModel
 
from kallithea.model.user_group import UserGroupModel
 
from kallithea.model.repo import RepoModel
 
from kallithea.model.repo_group import RepoGroupModel
 
from kallithea.model.meta import Session
 
from kallithea.model.scm import ScmModel
 
from kallithea.model.gist import GistModel
 
from kallithea.model.db import Repository, User, Setting
 
from kallithea.lib.utils2 import time_to_datetime
 

	
 

	
 
API_URL = '/_admin/api'
 
TEST_USER_GROUP = 'test_user_group'
 
TEST_REPO_GROUP = 'test_repo_group'
 

	
 
fixture = Fixture()
 

	
 

	
 
def _build_data(apikey, method, **kw):
 
    """
 
    Builds API data with given random ID
 

	
 
    :param random_id:
 
    """
 
    random_id = random.randrange(1, 9999)
 
    return random_id, json.dumps({
 
        "id": random_id,
 
        "api_key": apikey,
 
        "method": method,
 
        "args": kw
 
    })
 

	
 

	
 
jsonify = lambda obj: json.loads(json.dumps(obj))
 

	
 

	
 
def crash(*args, **kwargs):
 
    raise Exception('Total Crash !')
 

	
 

	
 
def api_call(test_obj, params):
 
    response = test_obj.app.post(API_URL, content_type='application/json',
 
                                 params=params)
 
    return response
 

	
 

	
 
## helpers
 
def make_user_group(name=TEST_USER_GROUP):
 
    gr = fixture.create_user_group(name, cur_user=TEST_USER_ADMIN_LOGIN)
 
    UserGroupModel().add_user_to_group(user_group=gr,
 
                                       user=TEST_USER_ADMIN_LOGIN)
 
    Session().commit()
 
    return gr
 

	
 

	
 
def make_repo_group(name=TEST_REPO_GROUP):
 
    gr = fixture.create_repo_group(name, cur_user=TEST_USER_ADMIN_LOGIN)
 
    Session().commit()
 
    return gr
 

	
 

	
 
class _BaseTestApi(object):
 
    REPO = None
 
    REPO_TYPE = None
 

	
 
    @classmethod
 
    def setup_class(cls):
 
        cls.usr = User.get_by_username(TEST_USER_ADMIN_LOGIN)
 
        cls.apikey = cls.usr.api_key
 
        cls.test_user = UserModel().create_or_update(
 
            username='test-api',
 
            password='test',
 
            email='test@example.com',
 
            firstname='first',
 
            lastname='last'
 
        )
 
        Session().commit()
 
        cls.TEST_USER_LOGIN = cls.test_user.username
 
        cls.apikey_regular = cls.test_user.api_key
 

	
 
    @classmethod
 
    def teardown_class(cls):
 
        pass
 

	
 
    def setUp(self):
 
        self.maxDiff = None
 
        make_user_group()
 
        make_repo_group()
 

	
 
    def tearDown(self):
 
        fixture.destroy_user_group(TEST_USER_GROUP)
 
        fixture.destroy_gists()
 
        fixture.destroy_repo_group(TEST_REPO_GROUP)
 

	
 
    def _compare_ok(self, id_, expected, given):
 
        expected = jsonify({
 
            'id': id_,
 
            'error': None,
 
            'result': expected
 
        })
 
        given = json.loads(given)
 
        self.assertEqual(expected, given)
 

	
 
    def _compare_error(self, id_, expected, given):
 
        expected = jsonify({
 
            'id': id_,
 
            'error': expected,
 
            'result': None
 
        })
 
        given = json.loads(given)
 
        self.assertEqual(expected, given)
 

	
 
    def test_Optional_object(self):
 
        from kallithea.controllers.api.api import Optional
 

	
 
        option1 = Optional(None)
 
        self.assertEqual('<Optional:%s>' % None, repr(option1))
 
        self.assertEqual(option1(), None)
 

	
 
        self.assertEqual(1, Optional.extract(Optional(1)))
 
        self.assertEqual('trololo', Optional.extract('trololo'))
 

	
 
    def test_Optional_OAttr(self):
 
        from kallithea.controllers.api.api import Optional, OAttr
 

	
 
        option1 = Optional(OAttr('apiuser'))
 
        self.assertEqual('apiuser', Optional.extract(option1))
 

	
 
    def test_OAttr_object(self):
 
        from kallithea.controllers.api.api import OAttr
 

	
 
        oattr1 = OAttr('apiuser')
 
        self.assertEqual('<OptionalAttr:apiuser>', repr(oattr1))
 
        self.assertEqual(oattr1(), oattr1)
 

	
 
    def test_api_wrong_key(self):
 
        id_, params = _build_data('trololo', 'get_user')
 
        response = api_call(self, params)
 

	
 
        expected = 'Invalid API key'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_missing_non_optional_param(self):
 
        id_, params = _build_data(self.apikey, 'get_repo')
 
        response = api_call(self, params)
 

	
 
        expected = 'Missing non optional `repoid` arg in JSON DATA'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_missing_non_optional_param_args_null(self):
 
        id_, params = _build_data(self.apikey, 'get_repo')
 
        params = params.replace('"args": {}', '"args": null')
 
        response = api_call(self, params)
 

	
 
        expected = 'Missing non optional `repoid` arg in JSON DATA'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_missing_non_optional_param_args_bad(self):
 
        id_, params = _build_data(self.apikey, 'get_repo')
 
        params = params.replace('"args": {}', '"args": 1')
 
        response = api_call(self, params)
 

	
 
        expected = 'Missing non optional `repoid` arg in JSON DATA'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_args_is_null(self):
 
        id_, params = _build_data(self.apikey, 'get_users', )
 
        params = params.replace('"args": {}', '"args": null')
 
        response = api_call(self, params)
 
        self.assertEqual(response.status, '200 OK')
 

	
 
    def test_api_args_is_bad(self):
 
        id_, params = _build_data(self.apikey, 'get_users', )
 
        params = params.replace('"args": {}', '"args": 1')
 
        response = api_call(self, params)
 
        self.assertEqual(response.status, '200 OK')
 

	
 
    def test_api_args_different_args(self):
 
        import string
 
        expected = {
 
            'ascii_letters': string.ascii_letters,
 
            'ws': string.whitespace,
 
            'printables': string.printable
 
        }
 
        id_, params = _build_data(self.apikey, 'test', args=expected)
 
        response = api_call(self, params)
 
        self.assertEqual(response.status, '200 OK')
 
        self._compare_ok(id_, expected, response.body)
 

	
 
    def test_api_get_users(self):
 
        id_, params = _build_data(self.apikey, 'get_users', )
 
        response = api_call(self, params)
 
        ret_all = []
 
        _users = User.query().filter(User.username != User.DEFAULT_USER) \
 
            .order_by(User.username).all()
 
        for usr in _users:
 
            ret = usr.get_api_data()
 
            ret_all.append(jsonify(ret))
 
        expected = ret_all
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_user(self):
 
        id_, params = _build_data(self.apikey, 'get_user',
 
                                  userid=TEST_USER_ADMIN_LOGIN)
 
        response = api_call(self, params)
 

	
 
        usr = User.get_by_username(TEST_USER_ADMIN_LOGIN)
 
        ret = usr.get_api_data()
 
        ret['permissions'] = AuthUser(dbuser=usr).permissions
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_that_does_not_exist(self):
 
        id_, params = _build_data(self.apikey, 'get_user',
 
                                  userid='trololo')
 
        response = api_call(self, params)
 

	
 
        expected = "user `%s` does not exist" % 'trololo'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_without_giving_userid(self):
 
        id_, params = _build_data(self.apikey, 'get_user')
 
        response = api_call(self, params)
 

	
 
        usr = User.get_by_username(TEST_USER_ADMIN_LOGIN)
 
        ret = usr.get_api_data()
 
        ret['permissions'] = AuthUser(dbuser=usr).permissions
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_without_giving_userid_non_admin(self):
 
        id_, params = _build_data(self.apikey_regular, 'get_user')
 
        response = api_call(self, params)
 

	
 
        usr = User.get_by_username(self.TEST_USER_LOGIN)
 
        ret = usr.get_api_data()
 
        ret['permissions'] = AuthUser(dbuser=usr).permissions
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_with_giving_userid_non_admin(self):
 
        id_, params = _build_data(self.apikey_regular, 'get_user',
 
                                  userid=self.TEST_USER_LOGIN)
 
        response = api_call(self, params)
 

	
 
        expected = 'userid is not the same as your user'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_pull(self):
 
        repo_name = 'test_pull'
 
        r = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        r.clone_uri = os.path.join(TESTS_TMP_PATH, self.REPO)
 
        Session.add(r)
 
        Session.commit()
 

	
 
        id_, params = _build_data(self.apikey, 'pull',
 
                                  repoid=repo_name,)
 
        response = api_call(self, params)
 

	
 
        expected = {'msg': 'Pulled from `%s`' % repo_name,
 
                    'repository': repo_name}
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
        fixture.destroy_repo(repo_name)
 

	
 
    def test_api_pull_error(self):
 
        id_, params = _build_data(self.apikey, 'pull',
 
                                  repoid=self.REPO, )
 
        response = api_call(self, params)
 

	
 
        expected = 'Unable to pull changes from `%s`' % self.REPO
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_rescan_repos(self):
 
        id_, params = _build_data(self.apikey, 'rescan_repos')
 
        response = api_call(self, params)
 

	
 
        expected = {'added': [], 'removed': []}
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(ScmModel, 'repo_scan', crash)
 
    def test_api_rescan_error(self):
 
        id_, params = _build_data(self.apikey, 'rescan_repos', )
 
        response = api_call(self, params)
 

	
 
        expected = 'Error occurred during rescan repositories action'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_invalidate_cache(self):
 
        repo = RepoModel().get_by_repo_name(self.REPO)
 
        repo.scm_instance_cached()  # seed cache
 

	
 
        id_, params = _build_data(self.apikey, 'invalidate_cache',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 

	
 
        expected = {
 
            'msg': "Cache for repository `%s` was invalidated" % (self.REPO,),
 
            'repository': self.REPO
 
        }
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(ScmModel, 'mark_for_invalidation', crash)
 
    def test_api_invalidate_cache_error(self):
 
        id_, params = _build_data(self.apikey, 'invalidate_cache',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 

	
 
        expected = 'Error occurred during cache invalidation action'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_invalidate_cache_regular_user_no_permission(self):
 
        repo = RepoModel().get_by_repo_name(self.REPO)
 
        repo.scm_instance_cached() # seed cache
 

	
 
        id_, params = _build_data(self.apikey_regular, 'invalidate_cache',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 

	
 
        expected = "repository `%s` does not exist" % (self.REPO,)
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_lock_repo_lock_acquire(self):
 
        id_, params = _build_data(self.apikey, 'lock',
 
                                  userid=TEST_USER_ADMIN_LOGIN,
 
                                  repoid=self.REPO,
 
                                  locked=True)
 
        response = api_call(self, params)
 
        expected = {
 
            'repo': self.REPO, 'locked': True,
 
            'locked_since': response.json['result']['locked_since'],
 
            'locked_by': TEST_USER_ADMIN_LOGIN,
 
            'lock_state_changed': True,
 
            'msg': ('User `%s` set lock state for repo `%s` to `%s`'
 
                    % (TEST_USER_ADMIN_LOGIN, self.REPO, True))
 
        }
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_lock_repo_lock_acquire_by_non_admin(self):
 
        repo_name = 'api_delete_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE,
 
                            cur_user=self.TEST_USER_LOGIN)
 
        try:
 
            id_, params = _build_data(self.apikey_regular, 'lock',
 
                                      repoid=repo_name,
 
                                      locked=True)
 
            response = api_call(self, params)
 
            expected = {
 
                'repo': repo_name,
 
                'locked': True,
 
                'locked_since': response.json['result']['locked_since'],
 
                'locked_by': self.TEST_USER_LOGIN,
 
                'lock_state_changed': True,
 
                'msg': ('User `%s` set lock state for repo `%s` to `%s`'
 
                        % (self.TEST_USER_LOGIN, repo_name, True))
 
            }
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_lock_repo_lock_acquire_non_admin_with_userid(self):
 
        repo_name = 'api_delete_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE,
 
                            cur_user=self.TEST_USER_LOGIN)
 
        try:
 
            id_, params = _build_data(self.apikey_regular, 'lock',
 
                                      userid=TEST_USER_ADMIN_LOGIN,
 
                                      repoid=repo_name,
 
                                      locked=True)
 
            response = api_call(self, params)
 
            expected = 'userid is not the same as your user'
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_lock_repo_lock_acquire_non_admin_not_his_repo(self):
 
        id_, params = _build_data(self.apikey_regular, 'lock',
 
                                  repoid=self.REPO,
 
                                  locked=True)
 
        response = api_call(self, params)
 
        expected = 'repository `%s` does not exist' % (self.REPO)
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_lock_repo_lock_release(self):
 
        id_, params = _build_data(self.apikey, 'lock',
 
                                  userid=TEST_USER_ADMIN_LOGIN,
 
                                  repoid=self.REPO,
 
                                  locked=False)
 
        response = api_call(self, params)
 
        expected = {
 
            'repo': self.REPO,
 
            'locked': False,
 
            'locked_since': None,
 
            'locked_by': TEST_USER_ADMIN_LOGIN,
 
            'lock_state_changed': True,
 
            'msg': ('User `%s` set lock state for repo `%s` to `%s`'
 
                    % (TEST_USER_ADMIN_LOGIN, self.REPO, False))
 
        }
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_lock_repo_lock_acquire_optional_userid(self):
 
        id_, params = _build_data(self.apikey, 'lock',
 
                                  repoid=self.REPO,
 
                                  locked=True)
 
        response = api_call(self, params)
 
        time_ = response.json['result']['locked_since']
 
        expected = {
 
            'repo': self.REPO,
 
            'locked': True,
 
            'locked_since': time_,
 
            'locked_by': TEST_USER_ADMIN_LOGIN,
 
            'lock_state_changed': True,
 
            'msg': ('User `%s` set lock state for repo `%s` to `%s`'
 
                    % (TEST_USER_ADMIN_LOGIN, self.REPO, True))
 
        }
 

	
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_lock_repo_lock_optional_locked(self):
 
        id_, params = _build_data(self.apikey, 'lock',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 
        time_ = response.json['result']['locked_since']
 
        expected = {
 
            'repo': self.REPO,
 
            'locked': True,
 
            'locked_since': time_,
 
            'locked_by': TEST_USER_ADMIN_LOGIN,
 
            'lock_state_changed': False,
 
            'msg': ('Repo `%s` locked by `%s` on `%s`.'
 
                    % (self.REPO, TEST_USER_ADMIN_LOGIN,
 
                       json.dumps(time_to_datetime(time_))))
 
        }
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_lock_repo_lock_optional_not_locked(self):
 
        repo_name = 'api_not_locked'
 
        repo = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE,
 
                            cur_user=self.TEST_USER_LOGIN)
 
        self.assertEqual(repo.locked, [None, None])
 
        try:
 
            id_, params = _build_data(self.apikey, 'lock',
 
                                      repoid=repo.repo_id)
 
            response = api_call(self, params)
 
            expected = {
 
                'repo': repo_name,
 
                'locked': False,
 
                'locked_since': None,
 
                'locked_by': None,
 
                'lock_state_changed': False,
 
                'msg': ('Repo `%s` not locked.' % (repo_name,))
 
            }
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    @mock.patch.object(Repository, 'lock', crash)
 
    def test_api_lock_error(self):
 
        id_, params = _build_data(self.apikey, 'lock',
 
                                  userid=TEST_USER_ADMIN_LOGIN,
 
                                  repoid=self.REPO,
 
                                  locked=True)
 
        response = api_call(self, params)
 

	
 
        expected = 'Error occurred locking repository `%s`' % self.REPO
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_locks_regular_user(self):
 
        id_, params = _build_data(self.apikey_regular, 'get_locks')
 
        response = api_call(self, params)
 
        expected = []
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_locks_with_userid_regular_user(self):
 
        id_, params = _build_data(self.apikey_regular, 'get_locks',
 
                                  userid=TEST_USER_ADMIN_LOGIN)
 
        response = api_call(self, params)
 
        expected = 'userid is not the same as your user'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_locks(self):
 
        id_, params = _build_data(self.apikey, 'get_locks')
 
        response = api_call(self, params)
 
        expected = []
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_locks_with_one_locked_repo(self):
 
        repo_name = 'api_delete_me'
 
        repo = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE,
 
                                   cur_user=self.TEST_USER_LOGIN)
 
        Repository.lock(repo, User.get_by_username(self.TEST_USER_LOGIN).user_id)
 
        try:
 
            id_, params = _build_data(self.apikey, 'get_locks')
 
            response = api_call(self, params)
 
            expected = [repo.get_api_data()]
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_get_locks_with_one_locked_repo_for_specific_user(self):
 
        repo_name = 'api_delete_me'
 
        repo = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE,
 
                                   cur_user=self.TEST_USER_LOGIN)
 
        Repository.lock(repo, User.get_by_username(self.TEST_USER_LOGIN).user_id)
 
        try:
 
            id_, params = _build_data(self.apikey, 'get_locks',
 
                                      userid=self.TEST_USER_LOGIN)
 
            response = api_call(self, params)
 
            expected = [repo.get_api_data()]
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_get_locks_with_userid(self):
 
        id_, params = _build_data(self.apikey, 'get_locks',
 
                                  userid=TEST_USER_REGULAR_LOGIN)
 
        response = api_call(self, params)
 
        expected = []
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_create_existing_user(self):
 
        id_, params = _build_data(self.apikey, 'create_user',
 
                                  username=TEST_USER_ADMIN_LOGIN,
 
                                  email='test@foo.com',
 
                                  email='test@example.com',
 
                                  password='trololo')
 
        response = api_call(self, params)
 

	
 
        expected = "user `%s` already exist" % TEST_USER_ADMIN_LOGIN
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_create_user_with_existing_email(self):
 
        id_, params = _build_data(self.apikey, 'create_user',
 
                                  username=TEST_USER_ADMIN_LOGIN + 'new',
 
                                  email=TEST_USER_REGULAR_EMAIL,
 
                                  password='trololo')
 
        response = api_call(self, params)
 

	
 
        expected = "email `%s` already exist" % TEST_USER_REGULAR_EMAIL
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_create_user(self):
 
        username = 'test_new_api_user'
 
        email = username + "@foo.com"
 
        email = username + "@example.com"
 

	
 
        id_, params = _build_data(self.apikey, 'create_user',
 
                                  username=username,
 
                                  email=email,
 
                                  password='trololo')
 
        response = api_call(self, params)
 

	
 
        usr = User.get_by_username(username)
 
        ret = dict(
 
            msg='created new user `%s`' % username,
 
            user=jsonify(usr.get_api_data())
 
        )
 

	
 
        try:
 
            expected = ret
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user(usr.user_id)
 

	
 
    def test_api_create_user_without_password(self):
 
        username = 'test_new_api_user_passwordless'
 
        email = username + "@foo.com"
 
        email = username + "@example.com"
 

	
 
        id_, params = _build_data(self.apikey, 'create_user',
 
                                  username=username,
 
                                  email=email)
 
        response = api_call(self, params)
 

	
 
        usr = User.get_by_username(username)
 
        ret = dict(
 
            msg='created new user `%s`' % username,
 
            user=jsonify(usr.get_api_data())
 
        )
 
        try:
 
            expected = ret
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user(usr.user_id)
 

	
 
    def test_api_create_user_with_extern_name(self):
 
        username = 'test_new_api_user_passwordless'
 
        email = username + "@foo.com"
 
        email = username + "@example.com"
 

	
 
        id_, params = _build_data(self.apikey, 'create_user',
 
                                  username=username,
 
                                  email=email, extern_name='internal')
 
        response = api_call(self, params)
 

	
 
        usr = User.get_by_username(username)
 
        ret = dict(
 
            msg='created new user `%s`' % username,
 
            user=jsonify(usr.get_api_data())
 
        )
 
        try:
 
            expected = ret
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user(usr.user_id)
 

	
 
    @mock.patch.object(UserModel, 'create_or_update', crash)
 
    def test_api_create_user_when_exception_happened(self):
 

	
 
        username = 'test_new_api_user'
 
        email = username + "@foo.com"
 
        email = username + "@example.com"
 

	
 
        id_, params = _build_data(self.apikey, 'create_user',
 
                                  username=username,
 
                                  email=email,
 
                                  password='trololo')
 
        response = api_call(self, params)
 
        expected = 'failed to create user `%s`' % username
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_delete_user(self):
 
        usr = UserModel().create_or_update(username=u'test_user',
 
                                           password=u'qweqwe',
 
                                           email=u'u232@example.com',
 
                                           firstname=u'u1', lastname=u'u1')
 
        Session().commit()
 
        username = usr.username
 
        email = usr.email
 
        usr_id = usr.user_id
 
        ## DELETE THIS USER NOW
 

	
 
        id_, params = _build_data(self.apikey, 'delete_user',
 
                                  userid=username, )
 
        response = api_call(self, params)
 

	
 
        ret = {'msg': 'deleted user ID:%s %s' % (usr_id, username),
 
               'user': None}
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(UserModel, 'delete', crash)
 
    def test_api_delete_user_when_exception_happened(self):
 
        usr = UserModel().create_or_update(username=u'test_user',
 
                                           password=u'qweqwe',
 
                                           email=u'u232@example.com',
 
                                           firstname=u'u1', lastname=u'u1')
 
        Session().commit()
 
        username = usr.username
 

	
 
        id_, params = _build_data(self.apikey, 'delete_user',
 
                                  userid=username, )
 
        response = api_call(self, params)
 
        ret = 'failed to delete user ID:%s %s' % (usr.user_id,
 
                                                  usr.username)
 
        expected = ret
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @parameterized.expand([('firstname', 'new_username'),
 
                           ('lastname', 'new_username'),
 
                           ('email', 'new_username'),
 
                           ('admin', True),
 
                           ('admin', False),
 
                           ('extern_type', 'ldap'),
 
                           ('extern_type', None),
 
                           ('extern_name', 'test'),
 
                           ('extern_name', None),
 
                           ('active', False),
 
                           ('active', True),
 
                           ('password', 'newpass')
 
    ])
 
    def test_api_update_user(self, name, expected):
 
        usr = User.get_by_username(self.TEST_USER_LOGIN)
 
        kw = {name: expected,
 
              'userid': usr.user_id}
 
        id_, params = _build_data(self.apikey, 'update_user', **kw)
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'updated user ID:%s %s' % (
 
                usr.user_id, self.TEST_USER_LOGIN),
 
            'user': jsonify(User \
 
                .get_by_username(self.TEST_USER_LOGIN) \
 
                .get_api_data())
 
        }
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_update_user_no_changed_params(self):
 
        usr = User.get_by_username(TEST_USER_ADMIN_LOGIN)
 
        ret = jsonify(usr.get_api_data())
 
        id_, params = _build_data(self.apikey, 'update_user',
 
                                  userid=TEST_USER_ADMIN_LOGIN)
 

	
 
        response = api_call(self, params)
 
        ret = {
 
            'msg': 'updated user ID:%s %s' % (
 
                usr.user_id, TEST_USER_ADMIN_LOGIN),
 
            'user': ret
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_update_user_by_user_id(self):
 
        usr = User.get_by_username(TEST_USER_ADMIN_LOGIN)
 
        ret = jsonify(usr.get_api_data())
 
        id_, params = _build_data(self.apikey, 'update_user',
 
                                  userid=usr.user_id)
 

	
 
        response = api_call(self, params)
 
        ret = {
 
            'msg': 'updated user ID:%s %s' % (
 
                usr.user_id, TEST_USER_ADMIN_LOGIN),
 
            'user': ret
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_update_user_default_user(self):
 
        usr = User.get_default_user()
 
        id_, params = _build_data(self.apikey, 'update_user',
 
                                  userid=usr.user_id)
 

	
 
        response = api_call(self, params)
 
        expected = 'editing default user is forbidden'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(UserModel, 'update_user', crash)
 
    def test_api_update_user_when_exception_happens(self):
 
        usr = User.get_by_username(TEST_USER_ADMIN_LOGIN)
 
        ret = jsonify(usr.get_api_data())
 
        id_, params = _build_data(self.apikey, 'update_user',
 
                                  userid=usr.user_id)
 

	
 
        response = api_call(self, params)
 
        ret = 'failed to update user `%s`' % usr.user_id
 

	
 
        expected = ret
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_repo(self):
 
        new_group = 'some_new_group'
 
        make_user_group(new_group)
 
        RepoModel().grant_user_group_permission(repo=self.REPO,
 
                                                group_name=new_group,
 
                                                perm='repository.read')
 
        Session().commit()
 
        id_, params = _build_data(self.apikey, 'get_repo',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 

	
 
        repo = RepoModel().get_by_repo_name(self.REPO)
 
        ret = repo.get_api_data()
 

	
 
        members = []
 
        followers = []
 
        for user in repo.repo_to_perm:
 
            perm = user.permission.permission_name
 
            user = user.user
 
            user_data = {'name': user.username, 'type': "user",
 
                         'permission': perm}
 
            members.append(user_data)
 

	
 
        for user_group in repo.users_group_to_perm:
 
            perm = user_group.permission.permission_name
 
            user_group = user_group.users_group
 
            user_group_data = {'name': user_group.users_group_name,
 
                               'type': "user_group", 'permission': perm}
 
            members.append(user_group_data)
 

	
 
        for user in repo.followers:
 
            followers.append(user.user.get_api_data())
 

	
 
        ret['members'] = members
 
        ret['followers'] = followers
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_user_group(new_group)
 

	
 
    @parameterized.expand([
 
        ('repository.admin',),
 
        ('repository.write',),
 
        ('repository.read',),
 
    ])
 
    def test_api_get_repo_by_non_admin(self, grant_perm):
 
        RepoModel().grant_user_permission(repo=self.REPO,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm=grant_perm)
 
        Session().commit()
 
        id_, params = _build_data(self.apikey_regular, 'get_repo',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 

	
 
        repo = RepoModel().get_by_repo_name(self.REPO)
 
        ret = repo.get_api_data()
 

	
 
        members = []
 
        followers = []
 
        self.assertEqual(2, len(repo.repo_to_perm))
 
        for user in repo.repo_to_perm:
 
            perm = user.permission.permission_name
 
            user_obj = user.user
 
            user_data = {'name': user_obj.username, 'type': "user",
 
                         'permission': perm}
 
            members.append(user_data)
 

	
 
        for user_group in repo.users_group_to_perm:
 
            perm = user_group.permission.permission_name
 
            user_group_obj = user_group.users_group
 
            user_group_data = {'name': user_group_obj.users_group_name,
 
                               'type': "user_group", 'permission': perm}
 
            members.append(user_group_data)
 

	
 
        for user in repo.followers:
 
            followers.append(user.user.get_api_data())
 

	
 
        ret['members'] = members
 
        ret['followers'] = followers
 

	
 
        expected = ret
 
        try:
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            RepoModel().revoke_user_permission(self.REPO, self.TEST_USER_LOGIN)
 

	
 
    def test_api_get_repo_by_non_admin_no_permission_to_repo(self):
 
        RepoModel().grant_user_permission(repo=self.REPO,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm='repository.none')
 

	
 
        id_, params = _build_data(self.apikey_regular, 'get_repo',
 
                                  repoid=self.REPO)
 
        response = api_call(self, params)
 

	
 
        expected = 'repository `%s` does not exist' % (self.REPO)
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_repo_that_does_not_exist(self):
 
        id_, params = _build_data(self.apikey, 'get_repo',
 
                                  repoid='no-such-repo')
 
        response = api_call(self, params)
 

	
 
        ret = 'repository `%s` does not exist' % 'no-such-repo'
 
        expected = ret
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_repos(self):
 
        id_, params = _build_data(self.apikey, 'get_repos')
 
        response = api_call(self, params)
 

	
 
        result = []
 
        for repo in RepoModel().get_all():
 
            result.append(repo.get_api_data())
 
        ret = jsonify(result)
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_repos_non_admin(self):
 
        id_, params = _build_data(self.apikey_regular, 'get_repos')
 
        response = api_call(self, params)
 

	
 
        result = []
 
        for repo in RepoModel().get_all_user_repos(self.TEST_USER_LOGIN):
 
            result.append(repo.get_api_data())
 
        ret = jsonify(result)
 

	
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    @parameterized.expand([('all', 'all'),
 
                           ('dirs', 'dirs'),
 
                           ('files', 'files'), ])
 
    def test_api_get_repo_nodes(self, name, ret_type):
 
        rev = 'tip'
 
        path = '/'
 
        id_, params = _build_data(self.apikey, 'get_repo_nodes',
 
                                  repoid=self.REPO, revision=rev,
 
                                  root_path=path,
 
                                  ret_type=ret_type)
 
        response = api_call(self, params)
 

	
 
        # we don't test the actual return types here since it's tested somewhere
 
        # else
 
        expected = response.json['result']
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_repo_nodes_bad_revisions(self):
 
        rev = 'i-dont-exist'
 
        path = '/'
 
        id_, params = _build_data(self.apikey, 'get_repo_nodes',
 
                                  repoid=self.REPO, revision=rev,
 
                                  root_path=path, )
 
        response = api_call(self, params)
 

	
 
        expected = 'failed to get repo: `%s` nodes' % self.REPO
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_repo_nodes_bad_path(self):
 
        rev = 'tip'
 
        path = '/idontexits'
 
        id_, params = _build_data(self.apikey, 'get_repo_nodes',
 
                                  repoid=self.REPO, revision=rev,
 
                                  root_path=path, )
 
        response = api_call(self, params)
 

	
 
        expected = 'failed to get repo: `%s` nodes' % self.REPO
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_repo_nodes_bad_ret_type(self):
 
        rev = 'tip'
 
        path = '/'
 
        ret_type = 'error'
 
        id_, params = _build_data(self.apikey, 'get_repo_nodes',
 
                                  repoid=self.REPO, revision=rev,
 
                                  root_path=path,
 
                                  ret_type=ret_type)
 
        response = api_call(self, params)
 

	
 
        expected = ('ret_type must be one of %s'
 
                    % (','.join(['files', 'dirs', 'all'])))
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @parameterized.expand([('all', 'all', 'repository.write'),
 
                           ('dirs', 'dirs', 'repository.admin'),
 
                           ('files', 'files', 'repository.read'), ])
 
    def test_api_get_repo_nodes_by_regular_user(self, name, ret_type, grant_perm):
 
        RepoModel().grant_user_permission(repo=self.REPO,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm=grant_perm)
 
        Session().commit()
 

	
 
        rev = 'tip'
 
        path = '/'
 
        id_, params = _build_data(self.apikey_regular, 'get_repo_nodes',
 
                                  repoid=self.REPO, revision=rev,
 
                                  root_path=path,
 
                                  ret_type=ret_type)
 
        response = api_call(self, params)
 

	
 
        # we don't test the actual return types here since it's tested somewhere
 
        # else
 
        expected = response.json['result']
 
        try:
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            RepoModel().revoke_user_permission(self.REPO, self.TEST_USER_LOGIN)
 

	
 
    def test_api_create_repo(self):
 
        repo_name = 'api-repo'
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  owner=TEST_USER_ADMIN_LOGIN,
 
                                  repo_type=self.REPO_TYPE,
 
        )
 
        response = api_call(self, params)
 

	
 
        repo = RepoModel().get_by_repo_name(repo_name)
 
        self.assertNotEqual(repo, None)
 
        ret = {
 
            'msg': 'Created new repository `%s`' % repo_name,
 
            'success': True,
 
            'task': None,
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_repo(repo_name)
 

	
 
    def test_api_create_repo_and_repo_group(self):
 
        repo_name = 'my_gr/api-repo'
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  owner=TEST_USER_ADMIN_LOGIN,
 
                                  repo_type=self.REPO_TYPE,)
 
        response = api_call(self, params)
 
        print params
 
        repo = RepoModel().get_by_repo_name(repo_name)
 
        self.assertNotEqual(repo, None)
 
        ret = {
 
            'msg': 'Created new repository `%s`' % repo_name,
 
            'success': True,
 
            'task': None,
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_repo(repo_name)
 
        fixture.destroy_repo_group('my_gr')
 

	
 
    def test_api_create_repo_in_repo_group_without_permission(self):
 
        repo_group_name = '%s/api-repo-repo' % TEST_REPO_GROUP
 
        repo_name = '%s/api-repo' % repo_group_name
 

	
 
        rg = fixture.create_repo_group(repo_group_name)
 
        Session().commit()
 
        RepoGroupModel().grant_user_permission(repo_group_name,
 
                                               self.TEST_USER_LOGIN,
 
                                               'group.none')
 
        Session().commit()
 

	
 
        id_, params = _build_data(self.apikey_regular, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  repo_type=self.REPO_TYPE,
 
        )
 
        response = api_call(self, params)
 

	
 
        # Current result when API access control is different from Web:
 
        ret = {
 
            'msg': 'Created new repository `%s`' % repo_name,
 
            'success': True,
 
            'task': None,
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_repo(repo_name)
 

	
 
        # Expected and arguably more correct result:
 
        #expected = 'failed to create repository `%s`' % repo_name
 
        #self._compare_error(id_, expected, given=response.body)
 

	
 
        fixture.destroy_repo_group(repo_group_name)
 

	
 
    def test_api_create_repo_unknown_owner(self):
 
        repo_name = 'api-repo'
 
        owner = 'i-dont-exist'
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  owner=owner,
 
                                  repo_type=self.REPO_TYPE,
 
        )
 
        response = api_call(self, params)
 
        expected = 'user `%s` does not exist' % owner
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_create_repo_dont_specify_owner(self):
 
        repo_name = 'api-repo'
 
        owner = 'i-dont-exist'
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  repo_type=self.REPO_TYPE,
 
        )
 
        response = api_call(self, params)
 

	
 
        repo = RepoModel().get_by_repo_name(repo_name)
 
        self.assertNotEqual(repo, None)
 
        ret = {
 
            'msg': 'Created new repository `%s`' % repo_name,
 
            'success': True,
 
            'task': None,
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_repo(repo_name)
 

	
 
    def test_api_create_repo_by_non_admin(self):
 
        repo_name = 'api-repo'
 
        owner = 'i-dont-exist'
 
        id_, params = _build_data(self.apikey_regular, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  repo_type=self.REPO_TYPE,
 
        )
 
        response = api_call(self, params)
 

	
 
        repo = RepoModel().get_by_repo_name(repo_name)
 
        self.assertNotEqual(repo, None)
 
        ret = {
 
            'msg': 'Created new repository `%s`' % repo_name,
 
            'success': True,
 
            'task': None,
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_repo(repo_name)
 

	
 
    def test_api_create_repo_by_non_admin_specify_owner(self):
 
        repo_name = 'api-repo'
 
        owner = 'i-dont-exist'
 
        id_, params = _build_data(self.apikey_regular, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  repo_type=self.REPO_TYPE,
 
                                  owner=owner)
 
        response = api_call(self, params)
 

	
 
        expected = 'Only Kallithea admin can specify `owner` param'
 
        self._compare_error(id_, expected, given=response.body)
 
        fixture.destroy_repo(repo_name)
 

	
 
    def test_api_create_repo_exists(self):
 
        repo_name = self.REPO
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  owner=TEST_USER_ADMIN_LOGIN,
 
                                  repo_type=self.REPO_TYPE,)
 
        response = api_call(self, params)
 
        expected = "repo `%s` already exist" % repo_name
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(RepoModel, 'create', crash)
 
    def test_api_create_repo_exception_occurred(self):
 
        repo_name = 'api-repo'
 
        id_, params = _build_data(self.apikey, 'create_repo',
 
                                  repo_name=repo_name,
 
                                  owner=TEST_USER_ADMIN_LOGIN,
 
                                  repo_type=self.REPO_TYPE,)
 
        response = api_call(self, params)
 
        expected = 'failed to create repository `%s`' % repo_name
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @parameterized.expand([
 
        ('owner', {'owner': TEST_USER_REGULAR_LOGIN}),
 
        ('description', {'description': 'new description'}),
 
        ('active', {'active': True}),
 
        ('active', {'active': False}),
 
        ('clone_uri', {'clone_uri': 'http://foo.com/repo'}),
 
        ('clone_uri', {'clone_uri': 'http://example.com/repo'}),
 
        ('clone_uri', {'clone_uri': None}),
 
        ('landing_rev', {'landing_rev': 'branch:master'}),
 
        ('enable_statistics', {'enable_statistics': True}),
 
        ('enable_locking', {'enable_locking': True}),
 
        ('enable_downloads', {'enable_downloads': True}),
 
        ('name', {'name': 'new_repo_name'}),
 
        ('repo_group', {'group': 'test_group_for_update'}),
 
    ])
 
    def test_api_update_repo(self, changing_attr, updates):
 
        repo_name = 'api_update_me'
 
        repo = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        if changing_attr == 'repo_group':
 
            fixture.create_repo_group(updates['group'])
 

	
 
        id_, params = _build_data(self.apikey, 'update_repo',
 
                                  repoid=repo_name, **updates)
 
        response = api_call(self, params)
 
        if changing_attr == 'name':
 
            repo_name = updates['name']
 
        if changing_attr == 'repo_group':
 
            repo_name = '/'.join([updates['group'], repo_name])
 
        try:
 
            expected = {
 
                'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
 
                'repository': repo.get_api_data()
 
            }
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 
            if changing_attr == 'repo_group':
 
                fixture.destroy_repo_group(updates['group'])
 

	
 
    def test_api_update_repo_repo_group_does_not_exist(self):
 
        repo_name = 'admin_owned'
 
        fixture.create_repo(repo_name)
 
        updates = {'group': 'test_group_for_update'}
 
        id_, params = _build_data(self.apikey, 'update_repo',
 
                                  repoid=repo_name, **updates)
 
        response = api_call(self, params)
 
        try:
 
            expected = 'repository group `%s` does not exist' % updates['group']
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_update_repo_regular_user_not_allowed(self):
 
        repo_name = 'admin_owned'
 
        fixture.create_repo(repo_name)
 
        updates = {'active': False}
 
        id_, params = _build_data(self.apikey_regular, 'update_repo',
 
                                  repoid=repo_name, **updates)
 
        response = api_call(self, params)
 
        try:
 
            expected = 'repository `%s` does not exist' % repo_name
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    @mock.patch.object(RepoModel, 'update', crash)
 
    def test_api_update_repo_exception_occurred(self):
 
        repo_name = 'api_update_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        id_, params = _build_data(self.apikey, 'update_repo',
 
                                  repoid=repo_name, owner=TEST_USER_ADMIN_LOGIN,)
 
        response = api_call(self, params)
 
        try:
 
            expected = 'failed to update repo `%s`' % repo_name
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_update_repo_regular_user_change_repo_name(self):
 
        repo_name = 'admin_owned'
 
        new_repo_name = 'new_repo_name'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        RepoModel().grant_user_permission(repo=repo_name,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm='repository.admin')
 
        UserModel().revoke_perm('default', 'hg.create.repository')
 
        UserModel().grant_perm('default', 'hg.create.none')
 
        updates = {'name': new_repo_name}
 
        id_, params = _build_data(self.apikey_regular, 'update_repo',
 
                                  repoid=repo_name, **updates)
 
        response = api_call(self, params)
 
        try:
 
            expected = 'no permission to create (or move) repositories'
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 
            fixture.destroy_repo(new_repo_name)
 

	
 
    def test_api_update_repo_regular_user_change_repo_name_allowed(self):
 
        repo_name = 'admin_owned'
 
        new_repo_name = 'new_repo_name'
 
        repo = fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        RepoModel().grant_user_permission(repo=repo_name,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm='repository.admin')
 
        UserModel().revoke_perm('default', 'hg.create.none')
 
        UserModel().grant_perm('default', 'hg.create.repository')
 
        updates = {'name': new_repo_name}
 
        id_, params = _build_data(self.apikey_regular, 'update_repo',
 
                                  repoid=repo_name, **updates)
 
        response = api_call(self, params)
 
        try:
 
            expected = {
 
                'msg': 'updated repo ID:%s %s' % (repo.repo_id, new_repo_name),
 
                'repository': repo.get_api_data()
 
            }
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 
            fixture.destroy_repo(new_repo_name)
 

	
 
    def test_api_update_repo_regular_user_change_owner(self):
 
        repo_name = 'admin_owned'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        RepoModel().grant_user_permission(repo=repo_name,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm='repository.admin')
 
        updates = {'owner': TEST_USER_ADMIN_LOGIN}
 
        id_, params = _build_data(self.apikey_regular, 'update_repo',
 
                                  repoid=repo_name, **updates)
 
        response = api_call(self, params)
 
        try:
 
            expected = 'Only Kallithea admin can specify `owner` param'
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_delete_repo(self):
 
        repo_name = 'api_delete_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 

	
 
        id_, params = _build_data(self.apikey, 'delete_repo',
 
                                  repoid=repo_name, )
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'Deleted repository `%s`' % repo_name,
 
            'success': True
 
        }
 
        try:
 
            expected = ret
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_delete_repo_by_non_admin(self):
 
        repo_name = 'api_delete_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE,
 
                            cur_user=self.TEST_USER_LOGIN)
 
        id_, params = _build_data(self.apikey_regular, 'delete_repo',
 
                                  repoid=repo_name, )
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'Deleted repository `%s`' % repo_name,
 
            'success': True
 
        }
 
        try:
 
            expected = ret
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_delete_repo_by_non_admin_no_permission(self):
 
        repo_name = 'api_delete_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        try:
 
            id_, params = _build_data(self.apikey_regular, 'delete_repo',
 
                                      repoid=repo_name, )
 
            response = api_call(self, params)
 
            expected = 'repository `%s` does not exist' % (repo_name)
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_delete_repo_exception_occurred(self):
 
        repo_name = 'api_delete_me'
 
        fixture.create_repo(repo_name, repo_type=self.REPO_TYPE)
 
        try:
 
            with mock.patch.object(RepoModel, 'delete', crash):
 
                id_, params = _build_data(self.apikey, 'delete_repo',
 
                                          repoid=repo_name, )
 
                response = api_call(self, params)
 

	
 
                expected = 'failed to delete repository `%s`' % repo_name
 
                self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(repo_name)
 

	
 
    def test_api_fork_repo(self):
 
        fork_name = 'api-repo-fork'
 
        id_, params = _build_data(self.apikey, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
                                  owner=TEST_USER_ADMIN_LOGIN,
 
        )
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'Created fork of `%s` as `%s`' % (self.REPO,
 
                                                     fork_name),
 
            'success': True,
 
            'task': None,
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_repo(fork_name)
 

	
 
    def test_api_fork_repo_non_admin(self):
 
        fork_name = 'api-repo-fork'
 
        id_, params = _build_data(self.apikey_regular, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
        )
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'Created fork of `%s` as `%s`' % (self.REPO,
 
                                                     fork_name),
 
            'success': True,
 
            'task': None,
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 
        fixture.destroy_repo(fork_name)
 

	
 
    def test_api_fork_repo_non_admin_specify_owner(self):
 
        fork_name = 'api-repo-fork'
 
        id_, params = _build_data(self.apikey_regular, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
                                  owner=TEST_USER_ADMIN_LOGIN,
 
        )
 
        response = api_call(self, params)
 
        expected = 'Only Kallithea admin can specify `owner` param'
 
        self._compare_error(id_, expected, given=response.body)
 
        fixture.destroy_repo(fork_name)
 

	
 
    def test_api_fork_repo_non_admin_no_permission_to_fork(self):
 
        RepoModel().grant_user_permission(repo=self.REPO,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm='repository.none')
 
        fork_name = 'api-repo-fork'
 
        id_, params = _build_data(self.apikey_regular, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
        )
 
        response = api_call(self, params)
 
        expected = 'repository `%s` does not exist' % (self.REPO)
 
        self._compare_error(id_, expected, given=response.body)
 
        fixture.destroy_repo(fork_name)
 

	
 
    @parameterized.expand([('read', 'repository.read'),
 
                           ('write', 'repository.write'),
 
                           ('admin', 'repository.admin')])
 
    def test_api_fork_repo_non_admin_no_create_repo_permission(self, name, perm):
 
        fork_name = 'api-repo-fork'
 
        # regardless of base repository permission, forking is disallowed
 
        # when repository creation is disabled
 
        RepoModel().grant_user_permission(repo=self.REPO,
 
                                          user=self.TEST_USER_LOGIN,
 
                                          perm=perm)
 
        UserModel().revoke_perm('default', 'hg.create.repository')
 
        UserModel().grant_perm('default', 'hg.create.none')
 
        id_, params = _build_data(self.apikey_regular, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
        )
 
        response = api_call(self, params)
 
        expected = 'no permission to create repositories'
 
        self._compare_error(id_, expected, given=response.body)
 
        fixture.destroy_repo(fork_name)
 

	
 
    def test_api_fork_repo_unknown_owner(self):
 
        fork_name = 'api-repo-fork'
 
        owner = 'i-dont-exist'
 
        id_, params = _build_data(self.apikey, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
                                  owner=owner,
 
        )
 
        response = api_call(self, params)
 
        expected = 'user `%s` does not exist' % owner
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_fork_repo_fork_exists(self):
 
        fork_name = 'api-repo-fork'
 
        fixture.create_fork(self.REPO, fork_name)
 

	
 
        try:
 
            fork_name = 'api-repo-fork'
 

	
 
            id_, params = _build_data(self.apikey, 'fork_repo',
 
                                      repoid=self.REPO,
 
                                      fork_name=fork_name,
 
                                      owner=TEST_USER_ADMIN_LOGIN,
 
            )
 
            response = api_call(self, params)
 

	
 
            expected = "fork `%s` already exist" % fork_name
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_repo(fork_name)
 

	
 
    def test_api_fork_repo_repo_exists(self):
 
        fork_name = self.REPO
 

	
 
        id_, params = _build_data(self.apikey, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
                                  owner=TEST_USER_ADMIN_LOGIN,
 
        )
 
        response = api_call(self, params)
 

	
 
        expected = "repo `%s` already exist" % fork_name
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(RepoModel, 'create_fork', crash)
 
    def test_api_fork_repo_exception_occurred(self):
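        # create_fork is patched with `crash` above, so the call is expected to
        # fail and return the generic "failed to fork" error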
 
        fork_name = 'api-repo-fork'
 
        id_, params = _build_data(self.apikey, 'fork_repo',
 
                                  repoid=self.REPO,
 
                                  fork_name=fork_name,
 
                                  owner=TEST_USER_ADMIN_LOGIN,
 
        )
 
        response = api_call(self, params)
 

	
 
        expected = 'failed to fork repository `%s` as `%s`' % (self.REPO,
 
                                                               fork_name)
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_group(self):
 
        id_, params = _build_data(self.apikey, 'get_user_group',
 
                                  usergroupid=TEST_USER_GROUP)
 
        response = api_call(self, params)
 

	
 
        user_group = UserGroupModel().get_group(TEST_USER_GROUP)
 
        members = []
 
        for user in user_group.members:
 
            user = user.user
 
            members.append(user.get_api_data())
 

	
 
        ret = user_group.get_api_data()
 
        ret['members'] = members
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_get_user_groups(self):
 
        gr_name = 'test_user_group2'
 
        make_user_group(gr_name)
 

	
 
        id_, params = _build_data(self.apikey, 'get_user_groups', )
 
        response = api_call(self, params)
 

	
 
        try:
 
            expected = []
 
            for gr_name in [TEST_USER_GROUP, 'test_user_group2']:
 
                user_group = UserGroupModel().get_group(gr_name)
 
                ret = user_group.get_api_data()
 
                expected.append(ret)
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user_group(gr_name)
 

	
 
    def test_api_create_user_group(self):
 
        group_name = 'some_new_group'
 
        id_, params = _build_data(self.apikey, 'create_user_group',
 
                                  group_name=group_name)
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'created new user group `%s`' % group_name,
 
            'user_group': jsonify(UserGroupModel() \
 
                .get_by_name(group_name) \
 
                .get_api_data())
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
        fixture.destroy_user_group(group_name)
 

	
 
    def test_api_get_user_group_that_exist(self):
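        # creating a user group under a name that is already taken should
        # return an error rather than a new group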
 
        id_, params = _build_data(self.apikey, 'create_user_group',
 
                                  group_name=TEST_USER_GROUP)
 
        response = api_call(self, params)
 

	
 
        expected = "user group `%s` already exist" % TEST_USER_GROUP
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(UserGroupModel, 'create', crash)
 
    def test_api_get_user_group_exception_occurred(self):
 
        group_name = 'exception_happens'
 
        id_, params = _build_data(self.apikey, 'create_user_group',
 
                                  group_name=group_name)
 
        response = api_call(self, params)
 

	
 
        expected = 'failed to create group `%s`' % group_name
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @parameterized.expand([('group_name', {'group_name': 'new_group_name'}),
 
                           ('group_name', {'group_name': 'test_group_for_update'}),
 
                           ('owner', {'owner': TEST_USER_REGULAR_LOGIN}),
 
                           ('active', {'active': False}),
 
                           ('active', {'active': True})])
 
    def test_api_update_user_group(self, changing_attr, updates):
 
        gr_name = 'test_group_for_update'
 
        user_group = fixture.create_user_group(gr_name)
 
        id_, params = _build_data(self.apikey, 'update_user_group',
 
                                  usergroupid=gr_name, **updates)
 
        response = api_call(self, params)
 
        try:
 
            expected = {
 
               'msg': 'updated user group ID:%s %s' % (user_group.users_group_id,
 
                                                     user_group.users_group_name),
 
               'user_group': user_group.get_api_data()
 
            }
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            if changing_attr == 'group_name':
 
                # switch to updated name for proper cleanup
 
                gr_name = updates['group_name']
 
            fixture.destroy_user_group(gr_name)
 

	
 
    @mock.patch.object(UserGroupModel, 'update', crash)
 
    def test_api_update_user_group_exception_occurred(self):
 
        gr_name = 'test_group'
 
        fixture.create_user_group(gr_name)
 
        id_, params = _build_data(self.apikey, 'update_user_group',
 
                                  usergroupid=gr_name)
 
        response = api_call(self, params)
 
        try:
 
            expected = 'failed to update user group `%s`' % gr_name
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user_group(gr_name)
 

	
 
    def test_api_add_user_to_user_group(self):
 
        gr_name = 'test_group'
 
        fixture.create_user_group(gr_name)
 
        id_, params = _build_data(self.apikey, 'add_user_to_user_group',
 
                                  usergroupid=gr_name,
 
                                  userid=TEST_USER_ADMIN_LOGIN)
 
        response = api_call(self, params)
 
        try:
 
            expected = {
 
                'msg': 'added member `%s` to user group `%s`' % (
 
                    TEST_USER_ADMIN_LOGIN, gr_name),
 
                'success': True
 
            }
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user_group(gr_name)
 

	
 
    def test_api_add_user_to_user_group_that_doesnt_exist(self):
 
        id_, params = _build_data(self.apikey, 'add_user_to_user_group',
 
                                  usergroupid='false-group',
 
                                  userid=TEST_USER_ADMIN_LOGIN)
 
        response = api_call(self, params)
 

	
 
        expected = 'user group `%s` does not exist' % 'false-group'
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(UserGroupModel, 'add_user_to_group', crash)
 
    def test_api_add_user_to_user_group_exception_occurred(self):
 
        gr_name = 'test_group'
 
        fixture.create_user_group(gr_name)
 
        id_, params = _build_data(self.apikey, 'add_user_to_user_group',
 
                                  usergroupid=gr_name,
 
                                  userid=TEST_USER_ADMIN_LOGIN)
 
        response = api_call(self, params)
 

	
 
        try:
 
            expected = 'failed to add member to user group `%s`' % gr_name
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user_group(gr_name)
 

	
 
    def test_api_remove_user_from_user_group(self):
 
        gr_name = 'test_group_3'
 
        gr = fixture.create_user_group(gr_name)
 
        UserGroupModel().add_user_to_group(gr, user=TEST_USER_ADMIN_LOGIN)
 
        Session().commit()
 
        id_, params = _build_data(self.apikey, 'remove_user_from_user_group',
 
                                  usergroupid=gr_name,
 
                                  userid=TEST_USER_ADMIN_LOGIN)
 
        response = api_call(self, params)
 

	
 
        try:
 
            expected = {
 
                'msg': 'removed member `%s` from user group `%s`' % (
 
                    TEST_USER_ADMIN_LOGIN, gr_name
 
                ),
 
                'success': True}
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user_group(gr_name)
 

	
 
    @mock.patch.object(UserGroupModel, 'remove_user_from_group', crash)
 
    def test_api_remove_user_from_user_group_exception_occurred(self):
 
        gr_name = 'test_group_3'
 
        gr = fixture.create_user_group(gr_name)
 
        UserGroupModel().add_user_to_group(gr, user=TEST_USER_ADMIN_LOGIN)
 
        Session().commit()
 
        id_, params = _build_data(self.apikey, 'remove_user_from_user_group',
 
                                  usergroupid=gr_name,
 
                                  userid=TEST_USER_ADMIN_LOGIN)
 
        response = api_call(self, params)
 
        try:
 
            expected = 'failed to remove member from user group `%s`' % gr_name
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user_group(gr_name)
 

	
 
    def test_api_delete_user_group(self):
 
        gr_name = 'test_group'
 
        ugroup = fixture.create_user_group(gr_name)
 
        gr_id = ugroup.users_group_id
 
        id_, params = _build_data(self.apikey, 'delete_user_group',
 
                                  usergroupid=gr_name,
 
                                  userid=TEST_USER_ADMIN_LOGIN)
 
        response = api_call(self, params)
 

	
 
        try:
 
            expected = {
 
                'user_group': None,
 
                'msg': 'deleted user group ID:%s %s' % (gr_id, gr_name)
 
            }
 
            self._compare_ok(id_, expected, given=response.body)
 
        finally:
 
            if UserGroupModel().get_by_name(gr_name):
 
                fixture.destroy_user_group(gr_name)
 

	
 
    def test_api_delete_user_group_that_is_assigned(self):
 
        gr_name = 'test_group'
 
        ugroup = fixture.create_user_group(gr_name)
 
        gr_id = ugroup.users_group_id
 

	
 
        ugr_to_perm = RepoModel().grant_user_group_permission(self.REPO, gr_name, 'repository.write')
 
        msg = 'User Group assigned to %s' % ugr_to_perm.repository.repo_name
 

	
 
        id_, params = _build_data(self.apikey, 'delete_user_group',
 
                                  usergroupid=gr_name,
 
                                  userid=TEST_USER_ADMIN_LOGIN)
 
        response = api_call(self, params)
 

	
 
        try:
 
            expected = msg
 
            self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            if UserGroupModel().get_by_name(gr_name):
 
                fixture.destroy_user_group(gr_name)
 

	
 
    def test_api_delete_user_group_exception_occurred(self):
 
        gr_name = 'test_group'
 
        ugroup = fixture.create_user_group(gr_name)
 
        gr_id = ugroup.users_group_id
 
        id_, params = _build_data(self.apikey, 'delete_user_group',
 
                                  usergroupid=gr_name,
 
                                  userid=TEST_USER_ADMIN_LOGIN)
 

	
 
        try:
 
            with mock.patch.object(UserGroupModel, 'delete', crash):
 
                response = api_call(self, params)
 
                expected = 'failed to delete user group ID:%s %s' % (gr_id, gr_name)
 
                self._compare_error(id_, expected, given=response.body)
 
        finally:
 
            fixture.destroy_user_group(gr_name)
 

	
 
    @parameterized.expand([('none', 'repository.none'),
 
                           ('read', 'repository.read'),
 
                           ('write', 'repository.write'),
 
                           ('admin', 'repository.admin')])
 
    def test_api_grant_user_permission(self, name, perm):
 
        id_, params = _build_data(self.apikey,
 
                                  'grant_user_permission',
 
                                  repoid=self.REPO,
 
                                  userid=TEST_USER_ADMIN_LOGIN,
 
                                  perm=perm)
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % (
 
                perm, TEST_USER_ADMIN_LOGIN, self.REPO
 
            ),
 
            'success': True
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_grant_user_permission_wrong_permission(self):
 
        perm = 'haha.no.permission'
 
        id_, params = _build_data(self.apikey,
 
                                  'grant_user_permission',
 
                                  repoid=self.REPO,
 
                                  userid=TEST_USER_ADMIN_LOGIN,
 
                                  perm=perm)
 
        response = api_call(self, params)
 

	
 
        expected = 'permission `%s` does not exist' % perm
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(RepoModel, 'grant_user_permission', crash)
 
    def test_api_grant_user_permission_exception_when_adding(self):
 
        perm = 'repository.read'
 
        id_, params = _build_data(self.apikey,
 
                                  'grant_user_permission',
 
                                  repoid=self.REPO,
 
                                  userid=TEST_USER_ADMIN_LOGIN,
 
                                  perm=perm)
 
        response = api_call(self, params)
 

	
 
        expected = 'failed to edit permission for user: `%s` in repo: `%s`' % (
 
            TEST_USER_ADMIN_LOGIN, self.REPO
 
        )
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_revoke_user_permission(self):
 
        id_, params = _build_data(self.apikey,
 
                                  'revoke_user_permission',
 
                                  repoid=self.REPO,
 
                                  userid=TEST_USER_ADMIN_LOGIN, )
 
        response = api_call(self, params)
 

	
 
        expected = {
 
            'msg': 'Revoked perm for user: `%s` in repo: `%s`' % (
 
                TEST_USER_ADMIN_LOGIN, self.REPO
 
            ),
 
            'success': True
 
        }
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(RepoModel, 'revoke_user_permission', crash)
 
    def test_api_revoke_user_permission_exception_when_adding(self):
 
        id_, params = _build_data(self.apikey,
 
                                  'revoke_user_permission',
 
                                  repoid=self.REPO,
 
                                  userid=TEST_USER_ADMIN_LOGIN, )
 
        response = api_call(self, params)
 

	
 
        expected = 'failed to edit permission for user: `%s` in repo: `%s`' % (
 
            TEST_USER_ADMIN_LOGIN, self.REPO
 
        )
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @parameterized.expand([('none', 'repository.none'),
 
                           ('read', 'repository.read'),
 
                           ('write', 'repository.write'),
 
                           ('admin', 'repository.admin')])
 
    def test_api_grant_user_group_permission(self, name, perm):
 
        id_, params = _build_data(self.apikey,
 
                                  'grant_user_group_permission',
 
                                  repoid=self.REPO,
 
                                  usergroupid=TEST_USER_GROUP,
 
                                  perm=perm)
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'Granted perm: `%s` for user group: `%s` in repo: `%s`' % (
 
                perm, TEST_USER_GROUP, self.REPO
 
            ),
 
            'success': True
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    def test_api_grant_user_group_permission_wrong_permission(self):
 
        perm = 'haha.no.permission'
 
        id_, params = _build_data(self.apikey,
 
                                  'grant_user_group_permission',
 
                                  repoid=self.REPO,
 
                                  usergroupid=TEST_USER_GROUP,
 
                                  perm=perm)
 
        response = api_call(self, params)
 

	
 
        expected = 'permission `%s` does not exist' % perm
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(RepoModel, 'grant_user_group_permission', crash)
 
    def test_api_grant_user_group_permission_exception_when_adding(self):
 
        perm = 'repository.read'
 
        id_, params = _build_data(self.apikey,
 
                                  'grant_user_group_permission',
 
                                  repoid=self.REPO,
 
                                  usergroupid=TEST_USER_GROUP,
 
                                  perm=perm)
 
        response = api_call(self, params)
 

	
 
        expected = 'failed to edit permission for user group: `%s` in repo: `%s`' % (
 
            TEST_USER_GROUP, self.REPO
 
        )
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    def test_api_revoke_user_group_permission(self):
 
        RepoModel().grant_user_group_permission(repo=self.REPO,
 
                                                group_name=TEST_USER_GROUP,
 
                                                perm='repository.read')
 
        Session().commit()
 
        id_, params = _build_data(self.apikey,
 
                                  'revoke_user_group_permission',
 
                                  repoid=self.REPO,
 
                                  usergroupid=TEST_USER_GROUP, )
 
        response = api_call(self, params)
 

	
 
        expected = {
 
            'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % (
 
                TEST_USER_GROUP, self.REPO
 
            ),
 
            'success': True
 
        }
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    @mock.patch.object(RepoModel, 'revoke_user_group_permission', crash)
 
    def test_api_revoke_user_group_permission_exception_when_adding(self):
 
        id_, params = _build_data(self.apikey,
 
                                  'revoke_user_group_permission',
 
                                  repoid=self.REPO,
 
                                  usergroupid=TEST_USER_GROUP, )
 
        response = api_call(self, params)
 

	
 
        expected = 'failed to edit permission for user group: `%s` in repo: `%s`' % (
 
            TEST_USER_GROUP, self.REPO
 
        )
 
        self._compare_error(id_, expected, given=response.body)
 

	
 
    @parameterized.expand([
 
        ('none', 'group.none', 'none'),
 
        ('read', 'group.read', 'none'),
 
        ('write', 'group.write', 'none'),
 
        ('admin', 'group.admin', 'none'),
 

	
 
        ('none', 'group.none', 'all'),
 
        ('read', 'group.read', 'all'),
 
        ('write', 'group.write', 'all'),
 
        ('admin', 'group.admin', 'all'),
 

	
 
        ('none', 'group.none', 'repos'),
 
        ('read', 'group.read', 'repos'),
 
        ('write', 'group.write', 'repos'),
 
        ('admin', 'group.admin', 'repos'),
 

	
 
        ('none', 'group.none', 'groups'),
 
        ('read', 'group.read', 'groups'),
 
        ('write', 'group.write', 'groups'),
 
        ('admin', 'group.admin', 'groups'),
 
    ])
 
    def test_api_grant_user_permission_to_repo_group(self, name, perm, apply_to_children):
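        # apply_to_children controls whether the grant is applied recursively
        # to child repositories and/or repository groups
        # ('none', 'repos', 'groups' or 'all')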
 
        id_, params = _build_data(self.apikey,
 
                                  'grant_user_permission_to_repo_group',
 
                                  repogroupid=TEST_REPO_GROUP,
 
                                  userid=TEST_USER_ADMIN_LOGIN,
 
                                  perm=perm, apply_to_children=apply_to_children)
 
        response = api_call(self, params)
 

	
 
        ret = {
 
            'msg': 'Granted perm: `%s` (recursive:%s) for user: `%s` in repo group: `%s`' % (
 
                perm, apply_to_children, TEST_USER_ADMIN_LOGIN, TEST_REPO_GROUP
 
            ),
 
            'success': True
 
        }
 
        expected = ret
 
        self._compare_ok(id_, expected, given=response.body)
 

	
 
    @parameterized.expand([
 
        ('none_fails', 'group.none', 'none', False, False),
 
        ('read_fails', 'group.read', 'none', False, False),

Changeset was too big and was cut off...

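For readers of this changeset who have not used the API directly: the tests above go
through the test helpers (_build_data / api_call), but an equivalent request can be
made by any JSON-RPC client. A minimal sketch, assuming a Kallithea instance at
kallithea.example.com and placeholder API key and repository names (none of these
values come from the changeset itself):

    import json
    import urllib2

    def kallithea_api_call(url, apikey, method, **args):
        # Kallithea exposes a JSON-RPC style API: POST a JSON body with
        # id, api_key, method and args to the /_admin/api endpoint.
        payload = json.dumps({
            'id': 1,
            'api_key': apikey,
            'method': method,
            'args': args,
        })
        request = urllib2.Request(url, data=payload,
                                  headers={'Content-Type': 'application/json'})
        return json.load(urllib2.urlopen(request))

    # Hypothetical usage, mirroring the fork_repo tests above:
    # response = kallithea_api_call('https://kallithea.example.com/_admin/api',
    #                               'SOME_API_KEY', 'fork_repo',
    #                               repoid='some-repo', fork_name='api-repo-fork')
    # print response['result']['msg']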